1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "constructor.h"
39 #include "gomp-constants.h"
40 #include "omp-general.h"
42 #include "memmodel.h" /* For MEMMODEL_ enums. */
45 #define GCC_DIAG_STYLE __gcc_tdiag__
46 #include "diagnostic-core.h"
48 #define GCC_DIAG_STYLE __gcc_gfc__
54 /* True if OpenMP should regard this DECL as being a scalar which has Fortran's
55 allocatable or pointer attribute. */
58 gfc_omp_is_allocatable_or_ptr (const_tree decl
)
61 && (GFC_DECL_GET_SCALAR_POINTER (decl
)
62 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)));
65 /* True if the argument is an optional argument; except that false is also
66 returned for arguments with the value attribute (nonpointers) and for
67 assumed-shape variables (decl is a local variable containing arg->data).
68 Note that for 'procedure(), optional' the value false is used as that's
69 always a pointer and no additional indirection is used.
70 Note that pvoid_type_node is for 'type(c_ptr), value' (and c_funloc). */
73 gfc_omp_is_optional_argument (const_tree decl
)
75 return (TREE_CODE (decl
) == PARM_DECL
76 && DECL_LANG_SPECIFIC (decl
)
77 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
78 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
79 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) != FUNCTION_TYPE
80 && GFC_DECL_OPTIONAL_ARGUMENT (decl
));
83 /* Check whether this DECL belongs to a Fortran optional argument.
84 With 'for_present_check' set to false, decls which are optional parameters
85 themselve are returned as tree - or a NULL_TREE otherwise. Those decls are
86 always pointers. With 'for_present_check' set to true, the decl for checking
87 whether an argument is present is returned; for arguments with value
88 attribute this is the hidden argument and of BOOLEAN_TYPE. If the decl is
89 unrelated to optional arguments, NULL_TREE is returned. */
92 gfc_omp_check_optional_argument (tree decl
, bool for_present_check
)
94 if (!for_present_check
)
95 return gfc_omp_is_optional_argument (decl
) ? decl
: NULL_TREE
;
97 if (!DECL_LANG_SPECIFIC (decl
))
100 tree orig_decl
= decl
;
102 /* For assumed-shape arrays, a local decl with arg->data is used. */
103 if (TREE_CODE (decl
) != PARM_DECL
104 && (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
105 || GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))))
106 decl
= GFC_DECL_SAVED_DESCRIPTOR (decl
);
108 if (decl
== NULL_TREE
109 || TREE_CODE (decl
) != PARM_DECL
110 || !DECL_LANG_SPECIFIC (decl
)
111 || !GFC_DECL_OPTIONAL_ARGUMENT (decl
))
114 /* Scalars with VALUE attribute which are passed by value use a hidden
115 argument to denote the present status. They are passed as nonpointer type
116 with one exception: 'type(c_ptr), value' as 'void*'. */
117 /* Cf. trans-expr.c's gfc_conv_expr_present. */
118 if (TREE_CODE (TREE_TYPE (decl
)) != POINTER_TYPE
119 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
121 char name
[GFC_MAX_SYMBOL_LEN
+ 2];
125 strcpy (&name
[1], IDENTIFIER_POINTER (DECL_NAME (decl
)));
126 tree_name
= get_identifier (name
);
128 /* Walk function argument list to find the hidden arg. */
129 decl
= DECL_ARGUMENTS (DECL_CONTEXT (decl
));
130 for ( ; decl
!= NULL_TREE
; decl
= TREE_CHAIN (decl
))
131 if (DECL_NAME (decl
) == tree_name
132 && DECL_ARTIFICIAL (decl
))
139 return fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
140 orig_decl
, null_pointer_node
);
144 /* Returns tree with NULL if it is not an array descriptor and with the tree to
145 access the 'data' component otherwise. With type_only = true, it returns the
146 TREE_TYPE without creating a new tree. */
149 gfc_omp_array_data (tree decl
, bool type_only
)
151 tree type
= TREE_TYPE (decl
);
153 if (POINTER_TYPE_P (type
))
154 type
= TREE_TYPE (type
);
156 if (!GFC_DESCRIPTOR_TYPE_P (type
))
160 return GFC_TYPE_ARRAY_DATAPTR_TYPE (type
);
162 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
163 decl
= build_fold_indirect_ref (decl
);
165 decl
= gfc_conv_descriptor_data_get (decl
);
170 /* True if OpenMP should privatize what this DECL points to rather
171 than the DECL itself. */
174 gfc_omp_privatize_by_reference (const_tree decl
)
176 tree type
= TREE_TYPE (decl
);
178 if (TREE_CODE (type
) == REFERENCE_TYPE
179 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
182 if (TREE_CODE (type
) == POINTER_TYPE
183 && gfc_omp_is_optional_argument (decl
))
186 if (TREE_CODE (type
) == POINTER_TYPE
)
188 while (TREE_CODE (decl
) == COMPONENT_REF
)
189 decl
= TREE_OPERAND (decl
, 1);
191 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
192 that have POINTER_TYPE type and aren't scalar pointers, scalar
193 allocatables, Cray pointees or C pointers are supposed to be
194 privatized by reference. */
195 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
196 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
197 || GFC_DECL_CRAY_POINTEE (decl
)
198 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
199 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
202 if (!DECL_ARTIFICIAL (decl
)
203 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
206 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
208 if (DECL_LANG_SPECIFIC (decl
)
209 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
216 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
217 of DECL is predetermined. */
219 enum omp_clause_default_kind
220 gfc_omp_predetermined_sharing (tree decl
)
222 /* Associate names preserve the association established during ASSOCIATE.
223 As they are implemented either as pointers to the selector or array
224 descriptor and shouldn't really change in the ASSOCIATE region,
225 this decl can be either shared or firstprivate. If it is a pointer,
226 use firstprivate, as it is cheaper that way, otherwise make it shared. */
227 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
229 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
230 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
232 return OMP_CLAUSE_DEFAULT_SHARED
;
235 if (DECL_ARTIFICIAL (decl
)
236 && ! GFC_DECL_RESULT (decl
)
237 && ! (DECL_LANG_SPECIFIC (decl
)
238 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
239 return OMP_CLAUSE_DEFAULT_SHARED
;
241 /* Cray pointees shouldn't be listed in any clauses and should be
242 gimplified to dereference of the corresponding Cray pointer.
243 Make them all private, so that they are emitted in the debug
245 if (GFC_DECL_CRAY_POINTEE (decl
))
246 return OMP_CLAUSE_DEFAULT_PRIVATE
;
248 /* Assumed-size arrays are predetermined shared. */
249 if (TREE_CODE (decl
) == PARM_DECL
250 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
251 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
252 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
253 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
255 return OMP_CLAUSE_DEFAULT_SHARED
;
257 /* Dummy procedures aren't considered variables by OpenMP, thus are
258 disallowed in OpenMP clauses. They are represented as PARM_DECLs
259 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
260 to avoid complaining about their uses with default(none). */
261 if (TREE_CODE (decl
) == PARM_DECL
262 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
263 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
264 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
266 /* COMMON and EQUIVALENCE decls are shared. They
267 are only referenced through DECL_VALUE_EXPR of the variables
268 contained in them. If those are privatized, they will not be
269 gimplified to the COMMON or EQUIVALENCE decls. */
270 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
271 return OMP_CLAUSE_DEFAULT_SHARED
;
273 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
274 return OMP_CLAUSE_DEFAULT_SHARED
;
276 /* These are either array or derived parameters, or vtables.
277 In the former cases, the OpenMP standard doesn't consider them to be
278 variables at all (they can't be redefined), but they can nevertheless appear
279 in parallel/task regions and for default(none) purposes treat them as shared.
280 For vtables likely the same handling is desirable. */
281 if (VAR_P (decl
) && TREE_READONLY (decl
)
282 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
283 return OMP_CLAUSE_DEFAULT_SHARED
;
285 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
289 /* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
290 of DECL is predetermined. */
292 enum omp_clause_defaultmap_kind
293 gfc_omp_predetermined_mapping (tree decl
)
295 if (DECL_ARTIFICIAL (decl
)
296 && ! GFC_DECL_RESULT (decl
)
297 && ! (DECL_LANG_SPECIFIC (decl
)
298 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
299 return OMP_CLAUSE_DEFAULTMAP_TO
;
301 /* These are either array or derived parameters, or vtables. */
302 if (VAR_P (decl
) && TREE_READONLY (decl
)
303 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
304 return OMP_CLAUSE_DEFAULTMAP_TO
;
306 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
310 /* Return decl that should be used when reporting DEFAULT(NONE)
314 gfc_omp_report_decl (tree decl
)
316 if (DECL_ARTIFICIAL (decl
)
317 && DECL_LANG_SPECIFIC (decl
)
318 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
319 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
324 /* Return true if TYPE has any allocatable components. */
327 gfc_has_alloc_comps (tree type
, tree decl
)
331 if (POINTER_TYPE_P (type
))
333 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
334 type
= TREE_TYPE (type
);
335 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
339 if (GFC_DESCRIPTOR_TYPE_P (type
)
340 && (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
341 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
))
344 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
345 type
= gfc_get_element_type (type
);
347 if (TREE_CODE (type
) != RECORD_TYPE
)
350 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
352 ftype
= TREE_TYPE (field
);
353 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
355 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
356 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
358 if (gfc_has_alloc_comps (ftype
, field
))
364 /* Return true if TYPE is polymorphic but not with pointer attribute. */
367 gfc_is_polymorphic_nonptr (tree type
)
369 if (POINTER_TYPE_P (type
))
370 type
= TREE_TYPE (type
);
371 return GFC_CLASS_TYPE_P (type
);
374 /* Return true if TYPE is unlimited polymorphic but not with pointer attribute;
375 unlimited means also intrinsic types are handled and _len is used. */
378 gfc_is_unlimited_polymorphic_nonptr (tree type
)
380 if (POINTER_TYPE_P (type
))
381 type
= TREE_TYPE (type
);
382 if (!GFC_CLASS_TYPE_P (type
))
385 tree field
= TYPE_FIELDS (type
); /* _data */
387 field
= DECL_CHAIN (field
); /* _vptr */
389 field
= DECL_CHAIN (field
);
392 gcc_assert (strcmp ("_len", IDENTIFIER_POINTER (DECL_NAME (field
))) == 0);
396 /* Return true if the DECL is for an allocatable array or scalar. */
399 gfc_omp_allocatable_p (tree decl
)
404 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
407 tree type
= TREE_TYPE (decl
);
408 if (gfc_omp_privatize_by_reference (decl
))
409 type
= TREE_TYPE (type
);
411 if (GFC_DESCRIPTOR_TYPE_P (type
)
412 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
419 /* Return true if DECL in private clause needs
420 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
422 gfc_omp_private_outer_ref (tree decl
)
424 tree type
= TREE_TYPE (decl
);
426 if (gfc_omp_privatize_by_reference (decl
))
427 type
= TREE_TYPE (type
);
429 if (GFC_DESCRIPTOR_TYPE_P (type
)
430 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
433 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
436 if (gfc_has_alloc_comps (type
, decl
))
442 /* Callback for gfc_omp_unshare_expr. */
445 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
448 enum tree_code code
= TREE_CODE (t
);
450 /* Stop at types, decls, constants like copy_tree_r. */
451 if (TREE_CODE_CLASS (code
) == tcc_type
452 || TREE_CODE_CLASS (code
) == tcc_declaration
453 || TREE_CODE_CLASS (code
) == tcc_constant
456 else if (handled_component_p (t
)
457 || TREE_CODE (t
) == MEM_REF
)
459 *tp
= unshare_expr (t
);
466 /* Unshare in expr anything that the FE which normally doesn't
467 care much about tree sharing (because during gimplification
468 everything is unshared) could cause problems with tree sharing
469 at omp-low.c time. */
472 gfc_omp_unshare_expr (tree expr
)
474 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kinds of processing performed by gfc_walk_alloc_comps on the
   allocatable components of a derived type.  */
enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
485 /* Handle allocatable components in OpenMP clauses. */
488 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
489 enum walk_alloc_comps kind
)
491 stmtblock_t block
, tmpblock
;
492 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
493 gfc_init_block (&block
);
495 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
497 if (GFC_DESCRIPTOR_TYPE_P (type
))
499 gfc_init_block (&tmpblock
);
500 tem
= gfc_full_array_size (&tmpblock
, decl
,
501 GFC_TYPE_ARRAY_RANK (type
));
502 then_b
= gfc_finish_block (&tmpblock
);
503 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
504 tem
= gfc_omp_unshare_expr (tem
);
505 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
506 gfc_array_index_type
, tem
,
511 bool compute_nelts
= false;
512 if (!TYPE_DOMAIN (type
)
513 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
514 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
515 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
516 compute_nelts
= true;
517 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
519 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
520 if (lookup_attribute ("omp dummy var", a
))
521 compute_nelts
= true;
525 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
526 TYPE_SIZE_UNIT (type
),
527 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
528 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
531 tem
= array_type_nelts (type
);
532 tem
= fold_convert (gfc_array_index_type
, tem
);
535 tree nelems
= gfc_evaluate_now (tem
, &block
);
536 tree index
= gfc_create_var (gfc_array_index_type
, "S");
538 gfc_init_block (&tmpblock
);
539 tem
= gfc_conv_array_data (decl
);
540 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
541 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
542 tree destvar
, destvref
= NULL_TREE
;
545 tem
= gfc_conv_array_data (dest
);
546 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
547 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
549 gfc_add_expr_to_block (&tmpblock
,
550 gfc_walk_alloc_comps (declvref
, destvref
,
554 gfc_init_loopinfo (&loop
);
556 loop
.from
[0] = gfc_index_zero_node
;
557 loop
.loopvar
[0] = index
;
559 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
560 gfc_add_block_to_block (&block
, &loop
.pre
);
561 return gfc_finish_block (&block
);
563 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
565 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
567 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
568 type
= TREE_TYPE (decl
);
571 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
572 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
574 tree ftype
= TREE_TYPE (field
);
575 tree declf
, destf
= NULL_TREE
;
576 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
577 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
578 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
579 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
582 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
583 decl
, field
, NULL_TREE
);
585 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
586 dest
, field
, NULL_TREE
);
591 case WALK_ALLOC_COMPS_DTOR
:
593 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
594 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
595 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
597 gfc_add_modify (&block
, unshare_expr (destf
),
598 unshare_expr (declf
));
599 tem
= gfc_duplicate_allocatable_nocopy
600 (destf
, declf
, ftype
,
601 GFC_TYPE_ARRAY_RANK (ftype
));
603 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
604 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
606 case WALK_ALLOC_COMPS_COPY_CTOR
:
607 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
608 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
609 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
610 GFC_TYPE_ARRAY_RANK (ftype
),
612 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
613 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
618 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
621 gfc_init_block (&tmpblock
);
622 gfc_add_expr_to_block (&tmpblock
,
623 gfc_walk_alloc_comps (declf
, destf
,
625 then_b
= gfc_finish_block (&tmpblock
);
626 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
627 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
628 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
629 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
630 tem
= unshare_expr (declf
);
635 tem
= fold_convert (pvoid_type_node
, tem
);
636 tem
= fold_build2_loc (input_location
, NE_EXPR
,
637 logical_type_node
, tem
,
639 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
641 build_empty_stmt (input_location
));
643 gfc_add_expr_to_block (&block
, then_b
);
645 if (kind
== WALK_ALLOC_COMPS_DTOR
)
647 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
648 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
650 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
651 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
652 NULL_TREE
, NULL_TREE
, true,
654 GFC_CAF_COARRAY_NOCOARRAY
);
655 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
657 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
659 tem
= gfc_call_free (unshare_expr (declf
));
660 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
665 return gfc_finish_block (&block
);
668 /* Return code to initialize DECL with its default constructor, or
669 NULL if there's nothing to do. */
672 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
674 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
675 stmtblock_t block
, cond_block
;
677 switch (OMP_CLAUSE_CODE (clause
))
679 case OMP_CLAUSE__LOOPTEMP_
:
680 case OMP_CLAUSE__REDUCTEMP_
:
681 case OMP_CLAUSE__CONDTEMP_
:
682 case OMP_CLAUSE__SCANTEMP_
:
684 case OMP_CLAUSE_PRIVATE
:
685 case OMP_CLAUSE_LASTPRIVATE
:
686 case OMP_CLAUSE_LINEAR
:
687 case OMP_CLAUSE_REDUCTION
:
688 case OMP_CLAUSE_IN_REDUCTION
:
689 case OMP_CLAUSE_TASK_REDUCTION
:
695 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
696 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
697 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
698 || !POINTER_TYPE_P (type
)))
700 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
703 gfc_start_block (&block
);
704 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
705 OMP_CLAUSE_DECL (clause
),
706 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
707 gfc_add_expr_to_block (&block
, tem
);
708 return gfc_finish_block (&block
);
713 gcc_assert (outer
!= NULL_TREE
);
715 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
716 "not currently allocated" allocation status if outer
717 array is "not currently allocated", otherwise should be allocated. */
718 gfc_start_block (&block
);
720 gfc_init_block (&cond_block
);
722 if (GFC_DESCRIPTOR_TYPE_P (type
))
724 gfc_add_modify (&cond_block
, decl
, outer
);
725 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
726 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
727 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
729 gfc_conv_descriptor_lbound_get (decl
, rank
));
730 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
731 size
, gfc_index_one_node
);
732 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
733 size
= fold_build2_loc (input_location
, MULT_EXPR
,
734 gfc_array_index_type
, size
,
735 gfc_conv_descriptor_stride_get (decl
, rank
));
736 tree esize
= fold_convert (gfc_array_index_type
,
737 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
738 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
740 size
= unshare_expr (size
);
741 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
745 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
746 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
747 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
748 if (GFC_DESCRIPTOR_TYPE_P (type
))
749 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
751 gfc_add_modify (&cond_block
, unshare_expr (decl
),
752 fold_convert (TREE_TYPE (decl
), ptr
));
753 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
755 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
756 OMP_CLAUSE_DECL (clause
),
757 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
758 gfc_add_expr_to_block (&cond_block
, tem
);
760 then_b
= gfc_finish_block (&cond_block
);
762 /* Reduction clause requires allocated ALLOCATABLE. */
763 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
764 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_IN_REDUCTION
765 && OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_TASK_REDUCTION
)
767 gfc_init_block (&cond_block
);
768 if (GFC_DESCRIPTOR_TYPE_P (type
))
769 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
772 gfc_add_modify (&cond_block
, unshare_expr (decl
),
773 build_zero_cst (TREE_TYPE (decl
)));
774 else_b
= gfc_finish_block (&cond_block
);
776 tree tem
= fold_convert (pvoid_type_node
,
777 GFC_DESCRIPTOR_TYPE_P (type
)
778 ? gfc_conv_descriptor_data_get (outer
) : outer
);
779 tem
= unshare_expr (tem
);
780 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
781 tem
, null_pointer_node
);
782 gfc_add_expr_to_block (&block
,
783 build3_loc (input_location
, COND_EXPR
,
784 void_type_node
, cond
, then_b
,
786 /* Avoid -W*uninitialized warnings. */
788 suppress_warning (decl
, OPT_Wuninitialized
);
791 gfc_add_expr_to_block (&block
, then_b
);
793 return gfc_finish_block (&block
);
796 /* Build and return code for a copy constructor from SRC to DEST. */
799 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
801 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
802 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
803 tree cond
, then_b
, else_b
;
804 stmtblock_t block
, cond_block
;
806 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
807 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
809 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
810 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
811 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
813 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
815 if (gfc_is_polymorphic_nonptr (decl_type
))
817 if (POINTER_TYPE_P (decl_type
))
818 decl_type
= TREE_TYPE (decl_type
);
819 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
820 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
821 fatal_error (input_location
,
822 "Sorry, polymorphic arrays not yet supported for "
825 tree nelems
= build_int_cst (size_type_node
, 1); /* Scalar. */
826 tree src_data
= gfc_class_data_get (unshare_expr (src
));
827 tree dest_data
= gfc_class_data_get (unshare_expr (dest
));
828 bool unlimited
= gfc_is_unlimited_polymorphic_nonptr (type
);
830 gfc_start_block (&block
);
831 gfc_add_modify (&block
, gfc_class_vptr_get (dest
),
832 gfc_class_vptr_get (src
));
833 gfc_init_block (&cond_block
);
837 src_len
= gfc_class_len_get (src
);
838 gfc_add_modify (&cond_block
, gfc_class_len_get (unshare_expr (dest
)), src_len
);
841 /* Use: size = class._vtab._size * (class._len > 0 ? class._len : 1). */
842 size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (src
));
845 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
846 unshare_expr (src_len
),
847 build_zero_cst (TREE_TYPE (src_len
)));
848 cond
= build3_loc (input_location
, COND_EXPR
, size_type_node
, cond
,
849 fold_convert (size_type_node
,
850 unshare_expr (src_len
)),
851 build_int_cst (size_type_node
, 1));
852 size
= fold_build2_loc (input_location
, MULT_EXPR
, size_type_node
,
856 /* Malloc memory + call class->_vpt->_copy. */
857 call
= builtin_decl_explicit (BUILT_IN_MALLOC
);
858 call
= build_call_expr_loc (input_location
, call
, 1, size
);
859 gfc_add_modify (&cond_block
, dest_data
,
860 fold_convert (TREE_TYPE (dest_data
), call
));
861 gfc_add_expr_to_block (&cond_block
,
862 gfc_copy_class_to_class (src
, dest
, nelems
,
865 gcc_assert (TREE_CODE (dest_data
) == COMPONENT_REF
);
866 if (!GFC_DECL_GET_SCALAR_ALLOCATABLE (TREE_OPERAND (dest_data
, 1)))
868 gfc_add_block_to_block (&block
, &cond_block
);
872 /* Create: if (class._data != 0) <cond_block> else class._data = NULL; */
873 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
874 src_data
, null_pointer_node
);
875 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
876 void_type_node
, cond
,
877 gfc_finish_block (&cond_block
),
878 fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
879 unshare_expr (dest_data
), null_pointer_node
)));
881 return gfc_finish_block (&block
);
884 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
885 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
886 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
887 || !POINTER_TYPE_P (type
)))
889 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
891 gfc_start_block (&block
);
892 gfc_add_modify (&block
, dest
, src
);
893 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
894 WALK_ALLOC_COMPS_COPY_CTOR
);
895 gfc_add_expr_to_block (&block
, tem
);
896 return gfc_finish_block (&block
);
899 return build2_v (MODIFY_EXPR
, dest
, src
);
902 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
903 and copied from SRC. */
904 gfc_start_block (&block
);
906 gfc_init_block (&cond_block
);
908 gfc_add_modify (&cond_block
, dest
, fold_convert (TREE_TYPE (dest
), src
));
909 if (GFC_DESCRIPTOR_TYPE_P (type
))
911 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
912 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
913 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
915 gfc_conv_descriptor_lbound_get (dest
, rank
));
916 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
917 size
, gfc_index_one_node
);
918 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
919 size
= fold_build2_loc (input_location
, MULT_EXPR
,
920 gfc_array_index_type
, size
,
921 gfc_conv_descriptor_stride_get (dest
, rank
));
922 tree esize
= fold_convert (gfc_array_index_type
,
923 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
924 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
926 size
= unshare_expr (size
);
927 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
931 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
932 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
933 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
934 if (GFC_DESCRIPTOR_TYPE_P (type
))
935 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
937 gfc_add_modify (&cond_block
, unshare_expr (dest
),
938 fold_convert (TREE_TYPE (dest
), ptr
));
940 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
941 ? gfc_conv_descriptor_data_get (src
) : src
;
942 srcptr
= unshare_expr (srcptr
);
943 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
944 call
= build_call_expr_loc (input_location
,
945 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
947 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
948 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
950 tree tem
= gfc_walk_alloc_comps (src
, dest
,
951 OMP_CLAUSE_DECL (clause
),
952 WALK_ALLOC_COMPS_COPY_CTOR
);
953 gfc_add_expr_to_block (&cond_block
, tem
);
955 then_b
= gfc_finish_block (&cond_block
);
957 gfc_init_block (&cond_block
);
958 if (GFC_DESCRIPTOR_TYPE_P (type
))
959 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
962 gfc_add_modify (&cond_block
, unshare_expr (dest
),
963 build_zero_cst (TREE_TYPE (dest
)));
964 else_b
= gfc_finish_block (&cond_block
);
966 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
967 unshare_expr (srcptr
), null_pointer_node
);
968 gfc_add_expr_to_block (&block
,
969 build3_loc (input_location
, COND_EXPR
,
970 void_type_node
, cond
, then_b
, else_b
));
971 /* Avoid -W*uninitialized warnings. */
973 suppress_warning (dest
, OPT_Wuninitialized
);
975 return gfc_finish_block (&block
);
978 /* Similarly, except use an intrinsic or pointer assignment operator
982 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
984 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
985 tree cond
, then_b
, else_b
;
986 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
988 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
989 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
990 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
991 || !POINTER_TYPE_P (type
)))
993 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
995 gfc_start_block (&block
);
996 /* First dealloc any allocatable components in DEST. */
997 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
998 OMP_CLAUSE_DECL (clause
),
999 WALK_ALLOC_COMPS_DTOR
);
1000 gfc_add_expr_to_block (&block
, tem
);
1001 /* Then copy over toplevel data. */
1002 gfc_add_modify (&block
, dest
, src
);
1003 /* Finally allocate any allocatable components and copy. */
1004 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
1005 WALK_ALLOC_COMPS_COPY_CTOR
);
1006 gfc_add_expr_to_block (&block
, tem
);
1007 return gfc_finish_block (&block
);
1010 return build2_v (MODIFY_EXPR
, dest
, src
);
1013 gfc_start_block (&block
);
1015 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1017 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
1018 WALK_ALLOC_COMPS_DTOR
);
1019 tree tem
= fold_convert (pvoid_type_node
,
1020 GFC_DESCRIPTOR_TYPE_P (type
)
1021 ? gfc_conv_descriptor_data_get (dest
) : dest
);
1022 tem
= unshare_expr (tem
);
1023 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1024 tem
, null_pointer_node
);
1025 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1026 then_b
, build_empty_stmt (input_location
));
1027 gfc_add_expr_to_block (&block
, tem
);
1030 gfc_init_block (&cond_block
);
1032 if (GFC_DESCRIPTOR_TYPE_P (type
))
1034 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1035 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
1036 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1038 gfc_conv_descriptor_lbound_get (src
, rank
));
1039 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1040 size
, gfc_index_one_node
);
1041 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1042 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1043 gfc_array_index_type
, size
,
1044 gfc_conv_descriptor_stride_get (src
, rank
));
1045 tree esize
= fold_convert (gfc_array_index_type
,
1046 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1047 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1049 size
= unshare_expr (size
);
1050 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1054 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1055 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1057 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1058 ? gfc_conv_descriptor_data_get (dest
) : dest
;
1059 destptr
= unshare_expr (destptr
);
1060 destptr
= fold_convert (pvoid_type_node
, destptr
);
1061 gfc_add_modify (&cond_block
, ptr
, destptr
);
1063 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
1064 destptr
, null_pointer_node
);
1066 if (GFC_DESCRIPTOR_TYPE_P (type
))
1069 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
1071 tree rank
= gfc_rank_cst
[i
];
1072 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
1073 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
1074 gfc_array_index_type
, tem
,
1075 gfc_conv_descriptor_lbound_get (src
, rank
));
1076 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
1077 gfc_array_index_type
, tem
,
1078 gfc_conv_descriptor_lbound_get (dest
, rank
));
1079 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1080 tem
, gfc_conv_descriptor_ubound_get (dest
,
1082 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
1083 logical_type_node
, cond
, tem
);
1087 gfc_init_block (&cond_block2
);
1089 if (GFC_DESCRIPTOR_TYPE_P (type
))
1091 gfc_init_block (&inner_block
);
1092 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
1093 then_b
= gfc_finish_block (&inner_block
);
1095 gfc_init_block (&inner_block
);
1096 gfc_add_modify (&inner_block
, ptr
,
1097 gfc_call_realloc (&inner_block
, ptr
, size
));
1098 else_b
= gfc_finish_block (&inner_block
);
1100 gfc_add_expr_to_block (&cond_block2
,
1101 build3_loc (input_location
, COND_EXPR
,
1103 unshare_expr (nonalloc
),
1105 gfc_add_modify (&cond_block2
, dest
, src
);
1106 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
1110 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
1111 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
1112 fold_convert (type
, ptr
));
1114 then_b
= gfc_finish_block (&cond_block2
);
1115 else_b
= build_empty_stmt (input_location
);
1117 gfc_add_expr_to_block (&cond_block
,
1118 build3_loc (input_location
, COND_EXPR
,
1119 void_type_node
, unshare_expr (cond
),
1122 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
1123 ? gfc_conv_descriptor_data_get (src
) : src
;
1124 srcptr
= unshare_expr (srcptr
);
1125 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
1126 call
= build_call_expr_loc (input_location
,
1127 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
1129 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1130 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1132 tree tem
= gfc_walk_alloc_comps (src
, dest
,
1133 OMP_CLAUSE_DECL (clause
),
1134 WALK_ALLOC_COMPS_COPY_CTOR
);
1135 gfc_add_expr_to_block (&cond_block
, tem
);
1137 then_b
= gfc_finish_block (&cond_block
);
1139 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
1141 gfc_init_block (&cond_block
);
1142 if (GFC_DESCRIPTOR_TYPE_P (type
))
1144 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
1145 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
1146 NULL_TREE
, NULL_TREE
, true, NULL
,
1147 GFC_CAF_COARRAY_NOCOARRAY
);
1148 gfc_add_expr_to_block (&cond_block
, tmp
);
1152 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
1153 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
1154 gfc_add_modify (&cond_block
, unshare_expr (dest
),
1155 build_zero_cst (TREE_TYPE (dest
)));
1157 else_b
= gfc_finish_block (&cond_block
);
1159 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1160 unshare_expr (srcptr
), null_pointer_node
);
1161 gfc_add_expr_to_block (&block
,
1162 build3_loc (input_location
, COND_EXPR
,
1163 void_type_node
, cond
,
1167 gfc_add_expr_to_block (&block
, then_b
);
1169 return gfc_finish_block (&block
);
1173 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
1174 tree add
, tree nelems
)
1176 stmtblock_t tmpblock
;
1177 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
1178 nelems
= gfc_evaluate_now (nelems
, block
);
1180 gfc_init_block (&tmpblock
);
1181 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
1183 desta
= gfc_build_array_ref (dest
, index
, NULL
);
1184 srca
= gfc_build_array_ref (src
, index
, NULL
);
1188 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
1189 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
1190 fold_convert (sizetype
, index
),
1191 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
1192 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1193 TREE_TYPE (dest
), dest
,
1195 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1196 TREE_TYPE (src
), src
,
1199 gfc_add_modify (&tmpblock
, desta
,
1200 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
1204 gfc_init_loopinfo (&loop
);
1206 loop
.from
[0] = gfc_index_zero_node
;
1207 loop
.loopvar
[0] = index
;
1208 loop
.to
[0] = nelems
;
1209 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
1210 gfc_add_block_to_block (block
, &loop
.pre
);
1213 /* Build and return code for a constructor of DEST that initializes
1214 it to SRC plus ADD (ADD is scalar integer). */
1217 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
1219 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
1222 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
1224 gfc_start_block (&block
);
1225 add
= gfc_evaluate_now (add
, &block
);
1227 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1228 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1229 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1230 || !POINTER_TYPE_P (type
)))
1232 bool compute_nelts
= false;
1233 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
1234 if (!TYPE_DOMAIN (type
)
1235 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
1236 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
1237 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
1238 compute_nelts
= true;
1239 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
1241 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
1242 if (lookup_attribute ("omp dummy var", a
))
1243 compute_nelts
= true;
1247 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
1248 TYPE_SIZE_UNIT (type
),
1249 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1250 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
1253 nelems
= array_type_nelts (type
);
1254 nelems
= fold_convert (gfc_array_index_type
, nelems
);
1256 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
1257 return gfc_finish_block (&block
);
1260 /* Allocatable arrays in LINEAR clauses need to be allocated
1261 and copied from SRC. */
1262 gfc_add_modify (&block
, dest
, src
);
1263 if (GFC_DESCRIPTOR_TYPE_P (type
))
1265 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1266 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
1267 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1269 gfc_conv_descriptor_lbound_get (dest
, rank
));
1270 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1271 size
, gfc_index_one_node
);
1272 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1273 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1274 gfc_array_index_type
, size
,
1275 gfc_conv_descriptor_stride_get (dest
, rank
));
1276 tree esize
= fold_convert (gfc_array_index_type
,
1277 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1278 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
1279 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1280 nelems
, unshare_expr (esize
));
1281 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1283 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
1284 gfc_array_index_type
, nelems
,
1285 gfc_index_one_node
);
1288 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1289 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1290 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
1291 if (GFC_DESCRIPTOR_TYPE_P (type
))
1293 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
1294 tree etype
= gfc_get_element_type (type
);
1295 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
1296 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
1297 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
1298 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
1302 gfc_add_modify (&block
, unshare_expr (dest
),
1303 fold_convert (TREE_TYPE (dest
), ptr
));
1304 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
1305 tree dstm
= build_fold_indirect_ref (ptr
);
1306 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
1307 gfc_add_modify (&block
, dstm
,
1308 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
1310 return gfc_finish_block (&block
);
1313 /* Build and return code destructing DECL. Return NULL if nothing
1317 gfc_omp_clause_dtor (tree clause
, tree decl
)
1319 tree type
= TREE_TYPE (decl
), tem
;
1320 tree decl_type
= TREE_TYPE (OMP_CLAUSE_DECL (clause
));
1322 if (DECL_ARTIFICIAL (OMP_CLAUSE_DECL (clause
))
1323 && DECL_LANG_SPECIFIC (OMP_CLAUSE_DECL (clause
))
1324 && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)))
1326 = TREE_TYPE (GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_DECL (clause
)));
1327 if (gfc_is_polymorphic_nonptr (decl_type
))
1329 if (POINTER_TYPE_P (decl_type
))
1330 decl_type
= TREE_TYPE (decl_type
);
1331 decl_type
= TREE_TYPE (TYPE_FIELDS (decl_type
));
1332 if (GFC_DESCRIPTOR_TYPE_P (decl_type
) || GFC_ARRAY_TYPE_P (decl_type
))
1333 fatal_error (input_location
,
1334 "Sorry, polymorphic arrays not yet supported for "
1336 stmtblock_t block
, cond_block
;
1337 gfc_start_block (&block
);
1338 gfc_init_block (&cond_block
);
1339 tree final
= gfc_class_vtab_final_get (decl
);
1340 tree size
= fold_convert (size_type_node
, gfc_class_vtab_size_get (decl
));
1342 gfc_init_se (&se
, NULL
);
1343 symbol_attribute attr
= {};
1344 tree data
= gfc_class_data_get (decl
);
1345 tree desc
= gfc_conv_scalar_to_descriptor (&se
, data
, attr
);
1347 /* Call class->_vpt->_finalize + free. */
1348 tree call
= build_fold_indirect_ref (final
);
1349 call
= build_call_expr_loc (input_location
, call
, 3,
1350 gfc_build_addr_expr (NULL
, desc
),
1351 size
, boolean_false_node
);
1352 gfc_add_block_to_block (&cond_block
, &se
.pre
);
1353 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
1354 gfc_add_block_to_block (&cond_block
, &se
.post
);
1355 /* Create: if (_vtab && _final) <cond_block> */
1356 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1357 gfc_class_vptr_get (decl
),
1359 tree cond2
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1360 final
, null_pointer_node
);
1361 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1362 boolean_type_node
, cond
, cond2
);
1363 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1364 void_type_node
, cond
,
1365 gfc_finish_block (&cond_block
), NULL_TREE
));
1366 call
= builtin_decl_explicit (BUILT_IN_FREE
);
1367 call
= build_call_expr_loc (input_location
, call
, 1, data
);
1368 gfc_add_expr_to_block (&block
, fold_convert (void_type_node
, call
));
1369 return gfc_finish_block (&block
);
1372 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1373 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1374 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1375 || !POINTER_TYPE_P (type
)))
1377 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1378 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
1379 OMP_CLAUSE_DECL (clause
),
1380 WALK_ALLOC_COMPS_DTOR
);
1384 if (GFC_DESCRIPTOR_TYPE_P (type
))
1386 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1387 to be deallocated if they were allocated. */
1388 tem
= gfc_conv_descriptor_data_get (decl
);
1389 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1390 NULL_TREE
, true, NULL
,
1391 GFC_CAF_COARRAY_NOCOARRAY
);
1394 tem
= gfc_call_free (decl
);
1395 tem
= gfc_omp_unshare_expr (tem
);
1397 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1402 gfc_init_block (&block
);
1403 gfc_add_expr_to_block (&block
,
1404 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1405 OMP_CLAUSE_DECL (clause
),
1406 WALK_ALLOC_COMPS_DTOR
));
1407 gfc_add_expr_to_block (&block
, tem
);
1408 then_b
= gfc_finish_block (&block
);
1410 tem
= fold_convert (pvoid_type_node
,
1411 GFC_DESCRIPTOR_TYPE_P (type
)
1412 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1413 tem
= unshare_expr (tem
);
1414 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1415 tem
, null_pointer_node
);
1416 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1417 then_b
, build_empty_stmt (input_location
));
1422 /* Build a conditional expression in BLOCK. If COND_VAL is not
1423 null, then the block THEN_B is executed, otherwise ELSE_VAL
1424 is assigned to VAL. */
1427 gfc_build_cond_assign (stmtblock_t
*block
, tree val
, tree cond_val
,
1428 tree then_b
, tree else_val
)
1430 stmtblock_t cond_block
;
1431 tree else_b
= NULL_TREE
;
1432 tree val_ty
= TREE_TYPE (val
);
1436 gfc_init_block (&cond_block
);
1437 gfc_add_modify (&cond_block
, val
, fold_convert (val_ty
, else_val
));
1438 else_b
= gfc_finish_block (&cond_block
);
1440 gfc_add_expr_to_block (block
,
1441 build3_loc (input_location
, COND_EXPR
, void_type_node
,
1442 cond_val
, then_b
, else_b
));
1445 /* Build a conditional expression in BLOCK, returning a temporary
1446 variable containing the result. If COND_VAL is not null, then
1447 THEN_VAL will be assigned to the variable, otherwise ELSE_VAL
1452 gfc_build_cond_assign_expr (stmtblock_t
*block
, tree cond_val
,
1453 tree then_val
, tree else_val
)
1456 tree val_ty
= TREE_TYPE (then_val
);
1457 stmtblock_t cond_block
;
1459 val
= create_tmp_var (val_ty
);
1461 gfc_init_block (&cond_block
);
1462 gfc_add_modify (&cond_block
, val
, then_val
);
1463 tree then_b
= gfc_finish_block (&cond_block
);
1465 gfc_build_cond_assign (block
, val
, cond_val
, then_b
, else_val
);
1471 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
, bool openacc
)
1473 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1476 tree decl
= OMP_CLAUSE_DECL (c
);
1478 /* Assumed-size arrays can't be mapped implicitly, they have to be
1479 mapped explicitly using array sections. */
1480 if (TREE_CODE (decl
) == PARM_DECL
1481 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1482 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1483 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1484 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1487 error_at (OMP_CLAUSE_LOCATION (c
),
1488 "implicit mapping of assumed size array %qD", decl
);
1492 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1493 tree present
= gfc_omp_check_optional_argument (decl
, true);
1494 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1496 if (!gfc_omp_privatize_by_reference (decl
)
1497 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1498 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1499 && !GFC_DECL_CRAY_POINTEE (decl
)
1500 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1502 tree orig_decl
= decl
;
1504 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1505 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1506 OMP_CLAUSE_DECL (c4
) = decl
;
1507 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1508 decl
= build_fold_indirect_ref (decl
);
1510 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1511 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1513 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1514 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_POINTER
);
1515 OMP_CLAUSE_DECL (c2
) = decl
;
1516 OMP_CLAUSE_SIZE (c2
) = size_int (0);
1519 gfc_start_block (&block
);
1521 ptr
= gfc_build_cond_assign_expr (&block
, present
, decl
,
1523 gimplify_and_add (gfc_finish_block (&block
), pre_p
);
1524 ptr
= build_fold_indirect_ref (ptr
);
1525 OMP_CLAUSE_DECL (c
) = ptr
;
1526 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
1530 OMP_CLAUSE_DECL (c
) = decl
;
1531 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1533 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1534 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1535 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1537 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1538 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1539 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1540 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1541 decl
= build_fold_indirect_ref (decl
);
1542 OMP_CLAUSE_DECL (c
) = decl
;
1545 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1548 gfc_start_block (&block
);
1549 tree type
= TREE_TYPE (decl
);
1550 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1552 /* OpenMP: automatically map pointer targets with the pointer;
1553 hence, always update the descriptor/pointer itself.
1554 NOTE: This also remaps the pointer for allocatable arrays with
1555 'target' attribute which also don't have the 'restrict' qualifier. */
1556 bool always_modifier
= false;
1559 && !(TYPE_QUALS (TREE_TYPE (ptr
)) & TYPE_QUAL_RESTRICT
))
1560 always_modifier
= true;
1563 ptr
= gfc_build_cond_assign_expr (&block
, present
, ptr
,
1565 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1566 ptr
= build_fold_indirect_ref (ptr
);
1567 OMP_CLAUSE_DECL (c
) = ptr
;
1568 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1569 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1572 ptr
= create_tmp_var (TREE_TYPE (TREE_OPERAND (decl
, 0)));
1573 gfc_add_modify (&block
, ptr
, TREE_OPERAND (decl
, 0));
1575 OMP_CLAUSE_DECL (c2
) = build_fold_indirect_ref (ptr
);
1578 OMP_CLAUSE_DECL (c2
) = decl
;
1579 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1580 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1581 OMP_CLAUSE_SET_MAP_KIND (c3
, always_modifier
? GOMP_MAP_ALWAYS_POINTER
1582 : GOMP_MAP_POINTER
);
1585 ptr
= gfc_conv_descriptor_data_get (decl
);
1586 ptr
= gfc_build_addr_expr (NULL
, ptr
);
1587 ptr
= gfc_build_cond_assign_expr (&block
, present
,
1588 ptr
, null_pointer_node
);
1589 ptr
= build_fold_indirect_ref (ptr
);
1590 OMP_CLAUSE_DECL (c3
) = ptr
;
1593 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1594 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1595 tree size
= create_tmp_var (gfc_array_index_type
);
1596 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1597 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1598 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1599 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1601 stmtblock_t cond_block
;
1602 tree tem
, then_b
, else_b
, zero
, cond
;
1604 gfc_init_block (&cond_block
);
1605 tem
= gfc_full_array_size (&cond_block
, decl
,
1606 GFC_TYPE_ARRAY_RANK (type
));
1607 gfc_add_modify (&cond_block
, size
, tem
);
1608 gfc_add_modify (&cond_block
, size
,
1609 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1611 then_b
= gfc_finish_block (&cond_block
);
1612 gfc_init_block (&cond_block
);
1613 zero
= build_int_cst (gfc_array_index_type
, 0);
1614 gfc_add_modify (&cond_block
, size
, zero
);
1615 else_b
= gfc_finish_block (&cond_block
);
1616 tem
= gfc_conv_descriptor_data_get (decl
);
1617 tem
= fold_convert (pvoid_type_node
, tem
);
1618 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1619 boolean_type_node
, tem
, null_pointer_node
);
1622 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1623 boolean_type_node
, present
, cond
);
1625 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1626 void_type_node
, cond
,
1631 stmtblock_t cond_block
;
1634 gfc_init_block (&cond_block
);
1635 gfc_add_modify (&cond_block
, size
,
1636 gfc_full_array_size (&cond_block
, decl
,
1637 GFC_TYPE_ARRAY_RANK (type
)));
1638 gfc_add_modify (&cond_block
, size
,
1639 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1641 then_b
= gfc_finish_block (&cond_block
);
1643 gfc_build_cond_assign (&block
, size
, present
, then_b
,
1644 build_int_cst (gfc_array_index_type
, 0));
1648 gfc_add_modify (&block
, size
,
1649 gfc_full_array_size (&block
, decl
,
1650 GFC_TYPE_ARRAY_RANK (type
)));
1651 gfc_add_modify (&block
, size
,
1652 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1655 OMP_CLAUSE_SIZE (c
) = size
;
1656 tree stmt
= gfc_finish_block (&block
);
1657 gimplify_and_add (stmt
, pre_p
);
1660 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1662 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1663 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1664 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
1665 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
1666 OMP_CLAUSE_SIZE (c
) = size_int (0);
1669 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1670 OMP_CLAUSE_CHAIN (last
) = c2
;
1675 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1676 OMP_CLAUSE_CHAIN (last
) = c3
;
1681 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1682 OMP_CLAUSE_CHAIN (last
) = c4
;
1687 /* Return true if DECL is a scalar variable (for the purpose of
1688 implicit firstprivatization/mapping). Only if 'ptr_alloc_ok.'
1689 is true, allocatables and pointers are permitted. */
1692 gfc_omp_scalar_p (tree decl
, bool ptr_alloc_ok
)
1694 tree type
= TREE_TYPE (decl
);
1695 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1696 type
= TREE_TYPE (type
);
1697 if (TREE_CODE (type
) == POINTER_TYPE
)
1699 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1700 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1704 type
= TREE_TYPE (type
);
1706 if (GFC_ARRAY_TYPE_P (type
)
1707 || GFC_CLASS_TYPE_P (type
))
1710 if ((TREE_CODE (type
) == ARRAY_TYPE
|| TREE_CODE (type
) == INTEGER_TYPE
)
1711 && TYPE_STRING_FLAG (type
))
1713 if (INTEGRAL_TYPE_P (type
)
1714 || SCALAR_FLOAT_TYPE_P (type
)
1715 || COMPLEX_FLOAT_TYPE_P (type
))
1721 /* Return true if DECL is a scalar with target attribute but does not have the
1722 allocatable (or pointer) attribute (for the purpose of implicit mapping). */
1725 gfc_omp_scalar_target_p (tree decl
)
1727 return (DECL_P (decl
) && GFC_DECL_GET_SCALAR_TARGET (decl
)
1728 && gfc_omp_scalar_p (decl
, false));
1732 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1733 disregarded in OpenMP construct, because it is going to be
1734 remapped during OpenMP lowering. SHARED is true if DECL
1735 is going to be shared, false if it is going to be privatized. */
1738 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1740 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1741 && DECL_HAS_VALUE_EXPR_P (decl
))
1743 tree value
= DECL_VALUE_EXPR (decl
);
1745 if (TREE_CODE (value
) == COMPONENT_REF
1746 && VAR_P (TREE_OPERAND (value
, 0))
1747 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1749 /* If variable in COMMON or EQUIVALENCE is privatized, return
1750 true, as just that variable is supposed to be privatized,
1751 not the whole COMMON or whole EQUIVALENCE.
1752 For shared variables in COMMON or EQUIVALENCE, let them be
1753 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1754 from the same COMMON or EQUIVALENCE just one sharing of the
1755 whole COMMON or EQUIVALENCE is enough. */
1760 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1766 /* Return true if DECL that is shared iff SHARED is true should
1767 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1771 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1773 if (GFC_DECL_CRAY_POINTEE (decl
))
1776 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1777 && DECL_HAS_VALUE_EXPR_P (decl
))
1779 tree value
= DECL_VALUE_EXPR (decl
);
1781 if (TREE_CODE (value
) == COMPONENT_REF
1782 && VAR_P (TREE_OPERAND (value
, 0))
1783 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1790 /* Register language specific type size variables as potentially OpenMP
1791 firstprivate variables. */
1794 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1796 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1800 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1801 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1803 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1804 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1805 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1807 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1808 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1814 gfc_trans_add_clause (tree node
, tree tail
)
1816 OMP_CLAUSE_CHAIN (node
) = tail
;
1821 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1826 gfc_symbol
*proc_sym
;
1827 gfc_formal_arglist
*f
;
1829 gcc_assert (sym
->attr
.dummy
);
1830 proc_sym
= sym
->ns
->proc_name
;
1831 if (proc_sym
->attr
.entry_master
)
1833 if (gfc_return_by_reference (proc_sym
))
1836 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1839 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1845 return build_int_cst (integer_type_node
, cnt
);
1848 tree t
= gfc_get_symbol_decl (sym
);
1852 bool alternate_entry
;
1855 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1856 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1857 && sym
->result
== sym
;
1858 entry_master
= sym
->attr
.result
1859 && sym
->ns
->proc_name
->attr
.entry_master
1860 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1861 parent_decl
= current_function_decl
1862 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1864 if ((t
== parent_decl
&& return_value
)
1865 || (sym
->ns
&& sym
->ns
->proc_name
1866 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1867 && (alternate_entry
|| entry_master
)))
1872 /* Special case for assigning the return value of a function.
1873 Self recursive functions must have an explicit return value. */
1874 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1875 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1877 /* Similarly for alternate entry points. */
1878 else if (alternate_entry
1879 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1882 gfc_entry_list
*el
= NULL
;
1884 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1887 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1892 else if (entry_master
1893 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1895 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1901 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1902 gfc_omp_namelist
*namelist
, tree list
,
1905 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1906 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1908 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1909 if (t
!= error_mark_node
)
1911 tree node
= build_omp_clause (input_location
, code
);
1912 OMP_CLAUSE_DECL (node
) = t
;
1913 list
= gfc_trans_add_clause (node
, list
);
1915 if (code
== OMP_CLAUSE_LASTPRIVATE
1916 && namelist
->u
.lastprivate_conditional
)
1917 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (node
) = 1;
1923 struct omp_udr_find_orig_data
1925 gfc_omp_udr
*omp_udr
;
1930 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1933 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1934 if ((*e
)->expr_type
== EXPR_VARIABLE
1935 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1936 cd
->omp_orig_seen
= true;
1942 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1944 gfc_symbol
*sym
= n
->sym
;
1945 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1946 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1947 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1948 gfc_symbol omp_var_copy
[4];
1949 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1951 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1952 locus old_loc
= gfc_current_locus
;
1955 gfc_omp_udr
*udr
= n
->u2
.udr
? n
->u2
.udr
->udr
: NULL
;
1957 decl
= OMP_CLAUSE_DECL (c
);
1958 gfc_current_locus
= where
;
1959 type
= TREE_TYPE (decl
);
1960 outer_decl
= create_tmp_var_raw (type
);
1961 if (TREE_CODE (decl
) == PARM_DECL
1962 && TREE_CODE (type
) == REFERENCE_TYPE
1963 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1964 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1966 decl
= build_fold_indirect_ref (decl
);
1967 type
= TREE_TYPE (type
);
1970 /* Create a fake symbol for init value. */
1971 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1972 init_val_sym
.ns
= sym
->ns
;
1973 init_val_sym
.name
= sym
->name
;
1974 init_val_sym
.ts
= sym
->ts
;
1975 init_val_sym
.attr
.referenced
= 1;
1976 init_val_sym
.declared_at
= where
;
1977 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1978 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1979 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1980 else if (udr
->initializer_ns
)
1981 backend_decl
= NULL
;
1983 switch (sym
->ts
.type
)
1989 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1992 backend_decl
= NULL_TREE
;
1995 init_val_sym
.backend_decl
= backend_decl
;
1997 /* Create a fake symbol for the outer array reference. */
2000 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
2001 outer_sym
.attr
.dummy
= 0;
2002 outer_sym
.attr
.result
= 0;
2003 outer_sym
.attr
.flavor
= FL_VARIABLE
;
2004 outer_sym
.backend_decl
= outer_decl
;
2005 if (decl
!= OMP_CLAUSE_DECL (c
))
2006 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
2008 /* Create fake symtrees for it. */
2009 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
2010 symtree1
->n
.sym
= sym
;
2011 gcc_assert (symtree1
== root1
);
2013 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
2014 symtree2
->n
.sym
= &init_val_sym
;
2015 gcc_assert (symtree2
== root2
);
2017 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
2018 symtree3
->n
.sym
= &outer_sym
;
2019 gcc_assert (symtree3
== root3
);
2021 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
2024 omp_var_copy
[0] = *udr
->omp_out
;
2025 omp_var_copy
[1] = *udr
->omp_in
;
2026 *udr
->omp_out
= outer_sym
;
2027 *udr
->omp_in
= *sym
;
2028 if (udr
->initializer_ns
)
2030 omp_var_copy
[2] = *udr
->omp_priv
;
2031 omp_var_copy
[3] = *udr
->omp_orig
;
2032 *udr
->omp_priv
= *sym
;
2033 *udr
->omp_orig
= outer_sym
;
2037 /* Create expressions. */
2038 e1
= gfc_get_expr ();
2039 e1
->expr_type
= EXPR_VARIABLE
;
2041 e1
->symtree
= symtree1
;
2043 if (sym
->attr
.dimension
)
2045 e1
->ref
= ref
= gfc_get_ref ();
2046 ref
->type
= REF_ARRAY
;
2047 ref
->u
.ar
.where
= where
;
2048 ref
->u
.ar
.as
= sym
->as
;
2049 ref
->u
.ar
.type
= AR_FULL
;
2050 ref
->u
.ar
.dimen
= 0;
2052 t
= gfc_resolve_expr (e1
);
2056 if (backend_decl
!= NULL_TREE
)
2058 e2
= gfc_get_expr ();
2059 e2
->expr_type
= EXPR_VARIABLE
;
2061 e2
->symtree
= symtree2
;
2063 t
= gfc_resolve_expr (e2
);
2066 else if (udr
->initializer_ns
== NULL
)
2068 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
2069 e2
= gfc_default_initializer (&sym
->ts
);
2071 t
= gfc_resolve_expr (e2
);
2074 else if (n
->u2
.udr
->initializer
->op
== EXEC_ASSIGN
)
2076 e2
= gfc_copy_expr (n
->u2
.udr
->initializer
->expr2
);
2077 t
= gfc_resolve_expr (e2
);
2080 if (udr
&& udr
->initializer_ns
)
2082 struct omp_udr_find_orig_data cd
;
2084 cd
.omp_orig_seen
= false;
2085 gfc_code_walker (&n
->u2
.udr
->initializer
,
2086 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
2087 if (cd
.omp_orig_seen
)
2088 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
2091 e3
= gfc_copy_expr (e1
);
2092 e3
->symtree
= symtree3
;
2093 t
= gfc_resolve_expr (e3
);
2098 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
2102 e4
= gfc_add (e3
, e1
);
2105 e4
= gfc_multiply (e3
, e1
);
2107 case TRUTH_ANDIF_EXPR
:
2108 e4
= gfc_and (e3
, e1
);
2110 case TRUTH_ORIF_EXPR
:
2111 e4
= gfc_or (e3
, e1
);
2114 e4
= gfc_eqv (e3
, e1
);
2117 e4
= gfc_neqv (e3
, e1
);
2135 if (n
->u2
.udr
->combiner
->op
== EXEC_ASSIGN
)
2138 e3
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr1
);
2139 e4
= gfc_copy_expr (n
->u2
.udr
->combiner
->expr2
);
2140 t
= gfc_resolve_expr (e3
);
2142 t
= gfc_resolve_expr (e4
);
2151 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
2152 intrinsic_sym
.ns
= sym
->ns
;
2153 intrinsic_sym
.name
= iname
;
2154 intrinsic_sym
.ts
= sym
->ts
;
2155 intrinsic_sym
.attr
.referenced
= 1;
2156 intrinsic_sym
.attr
.intrinsic
= 1;
2157 intrinsic_sym
.attr
.function
= 1;
2158 intrinsic_sym
.attr
.implicit_type
= 1;
2159 intrinsic_sym
.result
= &intrinsic_sym
;
2160 intrinsic_sym
.declared_at
= where
;
2162 symtree4
= gfc_new_symtree (&root4
, iname
);
2163 symtree4
->n
.sym
= &intrinsic_sym
;
2164 gcc_assert (symtree4
== root4
);
2166 e4
= gfc_get_expr ();
2167 e4
->expr_type
= EXPR_FUNCTION
;
2169 e4
->symtree
= symtree4
;
2170 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
2171 e4
->value
.function
.actual
->expr
= e3
;
2172 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
2173 e4
->value
.function
.actual
->next
->expr
= e1
;
2175 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
2177 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
2178 e1
= gfc_copy_expr (e1
);
2179 e3
= gfc_copy_expr (e3
);
2180 t
= gfc_resolve_expr (e4
);
2184 /* Create the init statement list. */
2187 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
2189 stmt
= gfc_trans_call (n
->u2
.udr
->initializer
, false,
2190 NULL_TREE
, NULL_TREE
, false);
2191 if (TREE_CODE (stmt
) != BIND_EXPR
)
2192 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2195 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
2197 /* Create the merge statement list. */
2200 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
2202 stmt
= gfc_trans_call (n
->u2
.udr
->combiner
, false,
2203 NULL_TREE
, NULL_TREE
, false);
2204 if (TREE_CODE (stmt
) != BIND_EXPR
)
2205 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
2208 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
2210 /* And stick the placeholder VAR_DECL into the clause as well. */
2211 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
2213 gfc_current_locus
= old_loc
;
2226 gfc_free_array_spec (outer_sym
.as
);
2230 *udr
->omp_out
= omp_var_copy
[0];
2231 *udr
->omp_in
= omp_var_copy
[1];
2232 if (udr
->initializer_ns
)
2234 *udr
->omp_priv
= omp_var_copy
[2];
2235 *udr
->omp_orig
= omp_var_copy
[3];
2241 gfc_trans_omp_reduction_list (int kind
, gfc_omp_namelist
*namelist
, tree list
,
2242 locus where
, bool mark_addressable
)
2244 omp_clause_code clause
= OMP_CLAUSE_REDUCTION
;
2247 case OMP_LIST_REDUCTION
:
2248 case OMP_LIST_REDUCTION_INSCAN
:
2249 case OMP_LIST_REDUCTION_TASK
:
2251 case OMP_LIST_IN_REDUCTION
:
2252 clause
= OMP_CLAUSE_IN_REDUCTION
;
2254 case OMP_LIST_TASK_REDUCTION
:
2255 clause
= OMP_CLAUSE_TASK_REDUCTION
;
2260 for (; namelist
!= NULL
; namelist
= namelist
->next
)
2261 if (namelist
->sym
->attr
.referenced
)
2263 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
2264 if (t
!= error_mark_node
)
2266 tree node
= build_omp_clause (gfc_get_location (&namelist
->where
),
2268 OMP_CLAUSE_DECL (node
) = t
;
2269 if (mark_addressable
)
2270 TREE_ADDRESSABLE (t
) = 1;
2271 if (kind
== OMP_LIST_REDUCTION_INSCAN
)
2272 OMP_CLAUSE_REDUCTION_INSCAN (node
) = 1;
2273 if (kind
== OMP_LIST_REDUCTION_TASK
)
2274 OMP_CLAUSE_REDUCTION_TASK (node
) = 1;
2275 switch (namelist
->u
.reduction_op
)
2277 case OMP_REDUCTION_PLUS
:
2278 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
2280 case OMP_REDUCTION_MINUS
:
2281 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
2283 case OMP_REDUCTION_TIMES
:
2284 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
2286 case OMP_REDUCTION_AND
:
2287 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
2289 case OMP_REDUCTION_OR
:
2290 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
2292 case OMP_REDUCTION_EQV
:
2293 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
2295 case OMP_REDUCTION_NEQV
:
2296 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
2298 case OMP_REDUCTION_MAX
:
2299 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
2301 case OMP_REDUCTION_MIN
:
2302 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
2304 case OMP_REDUCTION_IAND
:
2305 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
2307 case OMP_REDUCTION_IOR
:
2308 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
2310 case OMP_REDUCTION_IEOR
:
2311 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
2313 case OMP_REDUCTION_USER
:
2314 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
2319 if (namelist
->sym
->attr
.dimension
2320 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
2321 || namelist
->sym
->attr
.allocatable
)
2322 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
2323 list
= gfc_trans_add_clause (node
, list
);
2330 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
2335 gfc_init_se (&se
, NULL
);
2336 gfc_conv_expr (&se
, expr
);
2337 gfc_add_block_to_block (block
, &se
.pre
);
2338 result
= gfc_evaluate_now (se
.expr
, block
);
2339 gfc_add_block_to_block (block
, &se
.post
);
/* Per-dimension loop step expressions consulted when translating
   'depend(sink: ...)' offsets below (the addend of each sink vector
   entry is divided by the matching step).  Heap-allocated vector;
   presumably filled/cleared around the translation of the enclosing
   doacross (ordered) loop — not visible in this chunk, TODO confirm.  */
2344 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
2347 /* Translate an array section or array element. */
2350 gfc_trans_omp_array_section (stmtblock_t
*block
, gfc_omp_namelist
*n
,
2351 tree decl
, bool element
, gomp_map_kind ptr_kind
,
2352 tree
&node
, tree
&node2
, tree
&node3
, tree
&node4
)
2356 tree elemsz
= NULL_TREE
;
2358 gfc_init_se (&se
, NULL
);
2362 gfc_conv_expr_reference (&se
, n
->expr
);
2363 gfc_add_block_to_block (block
, &se
.pre
);
2365 OMP_CLAUSE_SIZE (node
) = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ptr
)));
2366 elemsz
= OMP_CLAUSE_SIZE (node
);
2370 gfc_conv_expr_descriptor (&se
, n
->expr
);
2371 ptr
= gfc_conv_array_data (se
.expr
);
2372 tree type
= TREE_TYPE (se
.expr
);
2373 gfc_add_block_to_block (block
, &se
.pre
);
2374 OMP_CLAUSE_SIZE (node
) = gfc_full_array_size (block
, se
.expr
,
2375 GFC_TYPE_ARRAY_RANK (type
));
2376 elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2377 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2378 OMP_CLAUSE_SIZE (node
) = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2379 OMP_CLAUSE_SIZE (node
), elemsz
);
2381 gcc_assert (se
.post
.head
== NULL_TREE
);
2382 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
2383 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2384 ptr
= fold_convert (ptrdiff_type_node
, ptr
);
2386 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2387 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
2388 && ptr_kind
== GOMP_MAP_POINTER
)
2390 node4
= build_omp_clause (input_location
,
2392 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2393 OMP_CLAUSE_DECL (node4
) = decl
;
2394 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2395 decl
= build_fold_indirect_ref (decl
);
2397 else if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
2398 && n
->expr
->ts
.type
== BT_CHARACTER
2399 && n
->expr
->ts
.deferred
)
2401 gomp_map_kind map_kind
;
2402 if (GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (node
)))
2403 map_kind
= GOMP_MAP_TO
;
2404 else if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_RELEASE
2405 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DELETE
)
2406 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
2408 map_kind
= GOMP_MAP_ALLOC
;
2409 gcc_assert (se
.string_length
);
2410 node4
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2411 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
2412 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
2413 OMP_CLAUSE_SIZE (node4
) = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
2415 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2418 tree type
= TREE_TYPE (decl
);
2419 ptr2
= gfc_conv_descriptor_data_get (decl
);
2420 desc_node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2421 OMP_CLAUSE_DECL (desc_node
) = decl
;
2422 OMP_CLAUSE_SIZE (desc_node
) = TYPE_SIZE_UNIT (type
);
2423 if (ptr_kind
== GOMP_MAP_ALWAYS_POINTER
)
2425 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_TO
);
2427 node
= desc_node
; /* Needs to come first. */
2431 OMP_CLAUSE_SET_MAP_KIND (desc_node
, GOMP_MAP_TO_PSET
);
2434 node3
= build_omp_clause (input_location
,
2436 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2437 OMP_CLAUSE_DECL (node3
)
2438 = gfc_conv_descriptor_data_get (decl
);
2439 /* This purposely does not include GOMP_MAP_ALWAYS_POINTER. The extra
2440 cast prevents gimplify.c from recognising it as being part of the
2441 struct – and adding an 'alloc: for the 'desc.data' pointer, which
2442 would break as the 'desc' (the descriptor) is also mapped
2443 (see node4 above). */
2444 if (ptr_kind
== GOMP_MAP_ATTACH_DETACH
)
2445 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
2449 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2452 ptr2
= build_fold_addr_expr (decl
);
2453 offset
= fold_build2 (MINUS_EXPR
, ptrdiff_type_node
, ptr
,
2454 fold_convert (ptrdiff_type_node
, ptr2
));
2455 offset
= build2 (TRUNC_DIV_EXPR
, ptrdiff_type_node
,
2456 offset
, fold_convert (ptrdiff_type_node
, elemsz
));
2457 offset
= build4_loc (input_location
, ARRAY_REF
,
2458 TREE_TYPE (TREE_TYPE (decl
)),
2459 decl
, offset
, NULL_TREE
, NULL_TREE
);
2460 OMP_CLAUSE_DECL (node
) = offset
;
2464 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2467 node3
= build_omp_clause (input_location
,
2469 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2470 OMP_CLAUSE_DECL (node3
) = decl
;
2472 ptr2
= fold_convert (ptrdiff_type_node
, ptr2
);
2473 OMP_CLAUSE_SIZE (node3
) = fold_build2 (MINUS_EXPR
, ptrdiff_type_node
,
2478 handle_iterator (gfc_namespace
*ns
, stmtblock_t
*iter_block
, tree block
)
2480 tree list
= NULL_TREE
;
2481 for (gfc_symbol
*sym
= ns
->proc_name
; sym
; sym
= sym
->tlink
)
2486 tree last
= make_tree_vec (6);
2487 tree iter_var
= gfc_get_symbol_decl (sym
);
2488 tree type
= TREE_TYPE (iter_var
);
2489 TREE_VEC_ELT (last
, 0) = iter_var
;
2490 DECL_CHAIN (iter_var
) = BLOCK_VARS (block
);
2491 BLOCK_VARS (block
) = iter_var
;
2494 c
= gfc_constructor_first (sym
->value
->value
.constructor
);
2495 gfc_init_se (&se
, NULL
);
2496 gfc_conv_expr (&se
, c
->expr
);
2497 gfc_add_block_to_block (iter_block
, &se
.pre
);
2498 gfc_add_block_to_block (iter_block
, &se
.post
);
2499 TREE_VEC_ELT (last
, 1) = fold_convert (type
,
2500 gfc_evaluate_now (se
.expr
,
2503 c
= gfc_constructor_next (c
);
2504 gfc_init_se (&se
, NULL
);
2505 gfc_conv_expr (&se
, c
->expr
);
2506 gfc_add_block_to_block (iter_block
, &se
.pre
);
2507 gfc_add_block_to_block (iter_block
, &se
.post
);
2508 TREE_VEC_ELT (last
, 2) = fold_convert (type
,
2509 gfc_evaluate_now (se
.expr
,
2512 c
= gfc_constructor_next (c
);
2516 gfc_init_se (&se
, NULL
);
2517 gfc_conv_expr (&se
, c
->expr
);
2518 gfc_add_block_to_block (iter_block
, &se
.pre
);
2519 gfc_add_block_to_block (iter_block
, &se
.post
);
2520 gfc_conv_expr (&se
, c
->expr
);
2521 step
= fold_convert (type
,
2522 gfc_evaluate_now (se
.expr
,
2526 step
= build_int_cst (type
, 1);
2527 TREE_VEC_ELT (last
, 3) = step
;
2529 TREE_VEC_ELT (last
, 4) = save_expr (step
);
2530 TREE_CHAIN (last
) = list
;
2537 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
2538 locus where
, bool declare_simd
= false,
2539 bool openacc
= false)
2541 tree omp_clauses
= NULL_TREE
, prev_clauses
, chunk_size
, c
;
2542 tree iterator
= NULL_TREE
;
2543 tree tree_block
= NULL_TREE
;
2544 stmtblock_t iter_block
;
2546 enum omp_clause_code clause_code
;
2547 gfc_omp_namelist
*prev
= NULL
;
2550 if (clauses
== NULL
)
2553 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
2555 gfc_omp_namelist
*n
= clauses
->lists
[list
];
2561 case OMP_LIST_REDUCTION
:
2562 case OMP_LIST_REDUCTION_INSCAN
:
2563 case OMP_LIST_REDUCTION_TASK
:
2564 case OMP_LIST_IN_REDUCTION
:
2565 case OMP_LIST_TASK_REDUCTION
:
2566 /* An OpenACC async clause indicates the need to set reduction
2567 arguments addressable, to allow asynchronous copy-out. */
2568 omp_clauses
= gfc_trans_omp_reduction_list (list
, n
, omp_clauses
,
2569 where
, clauses
->async
);
2571 case OMP_LIST_PRIVATE
:
2572 clause_code
= OMP_CLAUSE_PRIVATE
;
2574 case OMP_LIST_SHARED
:
2575 clause_code
= OMP_CLAUSE_SHARED
;
2577 case OMP_LIST_FIRSTPRIVATE
:
2578 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
2580 case OMP_LIST_LASTPRIVATE
:
2581 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
2583 case OMP_LIST_COPYIN
:
2584 clause_code
= OMP_CLAUSE_COPYIN
;
2586 case OMP_LIST_COPYPRIVATE
:
2587 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
2589 case OMP_LIST_UNIFORM
:
2590 clause_code
= OMP_CLAUSE_UNIFORM
;
2592 case OMP_LIST_USE_DEVICE
:
2593 case OMP_LIST_USE_DEVICE_PTR
:
2594 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
2596 case OMP_LIST_USE_DEVICE_ADDR
:
2597 clause_code
= OMP_CLAUSE_USE_DEVICE_ADDR
;
2599 case OMP_LIST_IS_DEVICE_PTR
:
2600 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
2602 case OMP_LIST_NONTEMPORAL
:
2603 clause_code
= OMP_CLAUSE_NONTEMPORAL
;
2605 case OMP_LIST_SCAN_IN
:
2606 clause_code
= OMP_CLAUSE_INCLUSIVE
;
2608 case OMP_LIST_SCAN_EX
:
2609 clause_code
= OMP_CLAUSE_EXCLUSIVE
;
2614 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
2617 case OMP_LIST_ALIGNED
:
2618 for (; n
!= NULL
; n
= n
->next
)
2619 if (n
->sym
->attr
.referenced
|| declare_simd
)
2621 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2622 if (t
!= error_mark_node
)
2624 tree node
= build_omp_clause (input_location
,
2625 OMP_CLAUSE_ALIGNED
);
2626 OMP_CLAUSE_DECL (node
) = t
;
2632 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
2635 gfc_init_se (&se
, NULL
);
2636 gfc_conv_expr (&se
, n
->expr
);
2637 gfc_add_block_to_block (block
, &se
.pre
);
2638 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
2639 gfc_add_block_to_block (block
, &se
.post
);
2641 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
2643 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2647 case OMP_LIST_LINEAR
:
2649 gfc_expr
*last_step_expr
= NULL
;
2650 tree last_step
= NULL_TREE
;
2651 bool last_step_parm
= false;
2653 for (; n
!= NULL
; n
= n
->next
)
2657 last_step_expr
= n
->expr
;
2658 last_step
= NULL_TREE
;
2659 last_step_parm
= false;
2661 if (n
->sym
->attr
.referenced
|| declare_simd
)
2663 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2664 if (t
!= error_mark_node
)
2666 tree node
= build_omp_clause (input_location
,
2668 OMP_CLAUSE_DECL (node
) = t
;
2669 omp_clause_linear_kind kind
;
2670 switch (n
->u
.linear_op
)
2672 case OMP_LINEAR_DEFAULT
:
2673 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
2675 case OMP_LINEAR_REF
:
2676 kind
= OMP_CLAUSE_LINEAR_REF
;
2678 case OMP_LINEAR_VAL
:
2679 kind
= OMP_CLAUSE_LINEAR_VAL
;
2681 case OMP_LINEAR_UVAL
:
2682 kind
= OMP_CLAUSE_LINEAR_UVAL
;
2687 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
2688 if (last_step_expr
&& last_step
== NULL_TREE
)
2692 gfc_init_se (&se
, NULL
);
2693 gfc_conv_expr (&se
, last_step_expr
);
2694 gfc_add_block_to_block (block
, &se
.pre
);
2695 last_step
= gfc_evaluate_now (se
.expr
, block
);
2696 gfc_add_block_to_block (block
, &se
.post
);
2698 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
2700 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
2701 last_step
= gfc_trans_omp_variable (s
, true);
2702 last_step_parm
= true;
2706 = gfc_conv_constant_to_tree (last_step_expr
);
2710 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
2711 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
2715 if (kind
== OMP_CLAUSE_LINEAR_REF
)
2718 if (n
->sym
->attr
.flavor
== FL_PROCEDURE
)
2720 type
= gfc_get_function_type (n
->sym
);
2721 type
= build_pointer_type (type
);
2724 type
= gfc_sym_type (n
->sym
);
2725 if (POINTER_TYPE_P (type
))
2726 type
= TREE_TYPE (type
);
2727 /* Otherwise to be determined what exactly
2729 tree t
= fold_convert (sizetype
, last_step
);
2730 t
= size_binop (MULT_EXPR
, t
,
2731 TYPE_SIZE_UNIT (type
));
2732 OMP_CLAUSE_LINEAR_STEP (node
) = t
;
2737 = gfc_typenode_for_spec (&n
->sym
->ts
);
2738 OMP_CLAUSE_LINEAR_STEP (node
)
2739 = fold_convert (type
, last_step
);
2742 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
2743 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
2744 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2750 case OMP_LIST_AFFINITY
:
2751 case OMP_LIST_DEPEND
:
2752 iterator
= NULL_TREE
;
2754 prev_clauses
= omp_clauses
;
2755 for (; n
!= NULL
; n
= n
->next
)
2757 if (iterator
&& prev
->u2
.ns
!= n
->u2
.ns
)
2759 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
2760 TREE_VEC_ELT (iterator
, 5) = tree_block
;
2761 for (tree c
= omp_clauses
; c
!= prev_clauses
;
2762 c
= OMP_CLAUSE_CHAIN (c
))
2763 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
2764 OMP_CLAUSE_DECL (c
));
2765 prev_clauses
= omp_clauses
;
2766 iterator
= NULL_TREE
;
2768 if (n
->u2
.ns
&& (!prev
|| prev
->u2
.ns
!= n
->u2
.ns
))
2770 gfc_init_block (&iter_block
);
2771 tree_block
= make_node (BLOCK
);
2772 TREE_USED (tree_block
) = 1;
2773 BLOCK_VARS (tree_block
) = NULL_TREE
;
2774 iterator
= handle_iterator (n
->u2
.ns
, block
,
2778 gfc_init_block (&iter_block
);
2780 if (list
== OMP_LIST_DEPEND
2781 && n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
2783 tree vec
= NULL_TREE
;
2787 tree addend
= integer_zero_node
, t
;
2791 addend
= gfc_conv_constant_to_tree (n
->expr
);
2792 if (TREE_CODE (addend
) == INTEGER_CST
2793 && tree_int_cst_sgn (addend
) == -1)
2796 addend
= const_unop (NEGATE_EXPR
,
2797 TREE_TYPE (addend
), addend
);
2800 t
= gfc_trans_omp_variable (n
->sym
, false);
2801 if (t
!= error_mark_node
)
2803 if (i
< vec_safe_length (doacross_steps
)
2804 && !integer_zerop (addend
)
2805 && (*doacross_steps
)[i
])
2807 tree step
= (*doacross_steps
)[i
];
2808 addend
= fold_convert (TREE_TYPE (step
), addend
);
2809 addend
= build2 (TRUNC_DIV_EXPR
,
2810 TREE_TYPE (step
), addend
, step
);
2812 vec
= tree_cons (addend
, t
, vec
);
2814 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2817 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2821 if (vec
== NULL_TREE
)
2824 tree node
= build_omp_clause (input_location
,
2826 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2827 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2828 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2832 if (!n
->sym
->attr
.referenced
)
2835 tree node
= build_omp_clause (input_location
,
2836 list
== OMP_LIST_DEPEND
2838 : OMP_CLAUSE_AFFINITY
);
2839 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2841 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2842 if (gfc_omp_privatize_by_reference (decl
))
2843 decl
= build_fold_indirect_ref (decl
);
2844 if (n
->u
.depend_op
== OMP_DEPEND_DEPOBJ
2845 && POINTER_TYPE_P (TREE_TYPE (decl
)))
2846 decl
= build_fold_indirect_ref (decl
);
2847 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2849 decl
= gfc_conv_descriptor_data_get (decl
);
2850 decl
= fold_convert (build_pointer_type (char_type_node
),
2852 decl
= build_fold_indirect_ref (decl
);
2854 else if (DECL_P (decl
))
2855 TREE_ADDRESSABLE (decl
) = 1;
2856 OMP_CLAUSE_DECL (node
) = decl
;
2861 gfc_init_se (&se
, NULL
);
2862 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2864 gfc_conv_expr_reference (&se
, n
->expr
);
2869 gfc_conv_expr_descriptor (&se
, n
->expr
);
2870 ptr
= gfc_conv_array_data (se
.expr
);
2872 gfc_add_block_to_block (&iter_block
, &se
.pre
);
2873 gfc_add_block_to_block (&iter_block
, &se
.post
);
2874 ptr
= fold_convert (build_pointer_type (char_type_node
),
2876 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2878 if (list
== OMP_LIST_DEPEND
)
2879 switch (n
->u
.depend_op
)
2882 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2884 case OMP_DEPEND_OUT
:
2885 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2887 case OMP_DEPEND_INOUT
:
2888 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2890 case OMP_DEPEND_MUTEXINOUTSET
:
2891 OMP_CLAUSE_DEPEND_KIND (node
)
2892 = OMP_CLAUSE_DEPEND_MUTEXINOUTSET
;
2894 case OMP_DEPEND_DEPOBJ
:
2895 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_DEPOBJ
;
2901 gfc_add_block_to_block (block
, &iter_block
);
2902 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2906 BLOCK_SUBBLOCKS (tree_block
) = gfc_finish_block (&iter_block
);
2907 TREE_VEC_ELT (iterator
, 5) = tree_block
;
2908 for (tree c
= omp_clauses
; c
!= prev_clauses
;
2909 c
= OMP_CLAUSE_CHAIN (c
))
2910 OMP_CLAUSE_DECL (c
) = build_tree_list (iterator
,
2911 OMP_CLAUSE_DECL (c
));
2915 for (; n
!= NULL
; n
= n
->next
)
2917 if (!n
->sym
->attr
.referenced
)
2920 bool always_modifier
= false;
2921 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2922 tree node2
= NULL_TREE
;
2923 tree node3
= NULL_TREE
;
2924 tree node4
= NULL_TREE
;
2926 /* OpenMP: automatically map pointer targets with the pointer;
2927 hence, always update the descriptor/pointer itself. */
2929 && ((n
->expr
== NULL
&& n
->sym
->attr
.pointer
)
2930 || (n
->expr
&& gfc_expr_attr (n
->expr
).pointer
)))
2931 always_modifier
= true;
2933 switch (n
->u
.map_op
)
2936 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2938 case OMP_MAP_IF_PRESENT
:
2939 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_IF_PRESENT
);
2941 case OMP_MAP_ATTACH
:
2942 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ATTACH
);
2945 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2948 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2950 case OMP_MAP_TOFROM
:
2951 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2953 case OMP_MAP_ALWAYS_TO
:
2954 always_modifier
= true;
2955 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2957 case OMP_MAP_ALWAYS_FROM
:
2958 always_modifier
= true;
2959 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2961 case OMP_MAP_ALWAYS_TOFROM
:
2962 always_modifier
= true;
2963 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2965 case OMP_MAP_RELEASE
:
2966 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2968 case OMP_MAP_DELETE
:
2969 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2971 case OMP_MAP_DETACH
:
2972 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DETACH
);
2974 case OMP_MAP_FORCE_ALLOC
:
2975 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2977 case OMP_MAP_FORCE_TO
:
2978 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2980 case OMP_MAP_FORCE_FROM
:
2981 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2983 case OMP_MAP_FORCE_TOFROM
:
2984 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2986 case OMP_MAP_FORCE_PRESENT
:
2987 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2989 case OMP_MAP_FORCE_DEVICEPTR
:
2990 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2996 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2998 TREE_ADDRESSABLE (decl
) = 1;
3000 gfc_ref
*lastref
= NULL
;
3003 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3004 if (ref
->type
== REF_COMPONENT
|| ref
->type
== REF_ARRAY
)
3007 bool allocatable
= false, pointer
= false;
3009 if (lastref
&& lastref
->type
== REF_COMPONENT
)
3011 gfc_component
*c
= lastref
->u
.c
.component
;
3013 if (c
->ts
.type
== BT_CLASS
)
3015 pointer
= CLASS_DATA (c
)->attr
.class_pointer
;
3016 allocatable
= CLASS_DATA (c
)->attr
.allocatable
;
3020 pointer
= c
->attr
.pointer
;
3021 allocatable
= c
->attr
.allocatable
;
3026 || (n
->expr
->ref
->type
== REF_ARRAY
3027 && n
->expr
->ref
->u
.ar
.type
== AR_FULL
))
3029 tree present
= gfc_omp_check_optional_argument (decl
, true);
3030 if (openacc
&& n
->sym
->ts
.type
== BT_CLASS
)
3032 tree type
= TREE_TYPE (decl
);
3033 if (n
->sym
->attr
.optional
)
3034 sorry ("optional class parameter");
3035 if (POINTER_TYPE_P (type
))
3037 node4
= build_omp_clause (input_location
,
3039 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
3040 OMP_CLAUSE_DECL (node4
) = decl
;
3041 OMP_CLAUSE_SIZE (node4
) = size_int (0);
3042 decl
= build_fold_indirect_ref (decl
);
3044 tree ptr
= gfc_class_data_get (decl
);
3045 ptr
= build_fold_indirect_ref (ptr
);
3046 OMP_CLAUSE_DECL (node
) = ptr
;
3047 OMP_CLAUSE_SIZE (node
) = gfc_class_vtab_size_get (decl
);
3048 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3049 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
3050 OMP_CLAUSE_DECL (node2
) = decl
;
3051 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
3052 node3
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
3053 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_ATTACH_DETACH
);
3054 OMP_CLAUSE_DECL (node3
) = gfc_class_data_get (decl
);
3055 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3056 goto finalize_map_clause
;
3058 else if (POINTER_TYPE_P (TREE_TYPE (decl
))
3059 && (gfc_omp_privatize_by_reference (decl
)
3060 || GFC_DECL_GET_SCALAR_POINTER (decl
)
3061 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
3062 || GFC_DECL_CRAY_POINTEE (decl
)
3063 || GFC_DESCRIPTOR_TYPE_P
3064 (TREE_TYPE (TREE_TYPE (decl
)))
3065 || n
->sym
->ts
.type
== BT_DERIVED
))
3067 tree orig_decl
= decl
;
3069 /* For nonallocatable, nonpointer arrays, a temporary
3070 variable is generated, but this one is only defined if
3071 the variable is present; hence, we now set it to NULL
3072 to avoid accessing undefined variables. We cannot use
3073 a temporary variable here as otherwise the replacement
3074 of the variables in omp-low.c will not work. */
3075 if (present
&& GFC_ARRAY_TYPE_P (TREE_TYPE (decl
)))
3077 tree tmp
= fold_build2_loc (input_location
,
3079 void_type_node
, decl
,
3081 tree cond
= fold_build1_loc (input_location
,
3085 gfc_add_expr_to_block (block
,
3086 build3_loc (input_location
,
3092 node4
= build_omp_clause (input_location
,
3094 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
3095 OMP_CLAUSE_DECL (node4
) = decl
;
3096 OMP_CLAUSE_SIZE (node4
) = size_int (0);
3097 decl
= build_fold_indirect_ref (decl
);
3098 if ((TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
3099 || gfc_omp_is_optional_argument (orig_decl
))
3100 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
3101 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
3103 node3
= build_omp_clause (input_location
,
3105 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
3106 OMP_CLAUSE_DECL (node3
) = decl
;
3107 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3108 decl
= build_fold_indirect_ref (decl
);
3111 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3113 tree type
= TREE_TYPE (decl
);
3114 tree ptr
= gfc_conv_descriptor_data_get (decl
);
3116 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
3118 ptr
= fold_convert (build_pointer_type (char_type_node
),
3120 ptr
= build_fold_indirect_ref (ptr
);
3121 OMP_CLAUSE_DECL (node
) = ptr
;
3122 node2
= build_omp_clause (input_location
,
3124 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
3125 OMP_CLAUSE_DECL (node2
) = decl
;
3126 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
3127 node3
= build_omp_clause (input_location
,
3131 ptr
= gfc_conv_descriptor_data_get (decl
);
3132 ptr
= gfc_build_addr_expr (NULL
, ptr
);
3133 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
3135 ptr
= build_fold_indirect_ref (ptr
);
3136 OMP_CLAUSE_DECL (node3
) = ptr
;
3139 OMP_CLAUSE_DECL (node3
)
3140 = gfc_conv_descriptor_data_get (decl
);
3141 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3142 if (n
->u
.map_op
== OMP_MAP_ATTACH
)
3144 /* Standalone attach clauses used with arrays with
3145 descriptors must copy the descriptor to the target,
3146 else they won't have anything to perform the
3147 attachment onto (see OpenACC 2.6, "2.6.3. Data
3148 Structures with Pointers"). */
3149 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_ATTACH
);
3150 /* We don't want to map PTR at all in this case, so
3151 delete its node and shuffle the others down. */
3155 goto finalize_map_clause
;
3157 else if (n
->u
.map_op
== OMP_MAP_DETACH
)
3159 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_DETACH
);
3160 /* Similarly to above, we don't want to unmap PTR
3165 goto finalize_map_clause
;
3168 OMP_CLAUSE_SET_MAP_KIND (node3
,
3170 ? GOMP_MAP_ALWAYS_POINTER
3171 : GOMP_MAP_POINTER
);
3173 /* We have to check for n->sym->attr.dimension because
3174 of scalar coarrays. */
3175 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
3177 stmtblock_t cond_block
;
3179 = gfc_create_var (gfc_array_index_type
, NULL
);
3180 tree tem
, then_b
, else_b
, zero
, cond
;
3182 gfc_init_block (&cond_block
);
3184 = gfc_full_array_size (&cond_block
, decl
,
3185 GFC_TYPE_ARRAY_RANK (type
));
3186 gfc_add_modify (&cond_block
, size
, tem
);
3187 then_b
= gfc_finish_block (&cond_block
);
3188 gfc_init_block (&cond_block
);
3189 zero
= build_int_cst (gfc_array_index_type
, 0);
3190 gfc_add_modify (&cond_block
, size
, zero
);
3191 else_b
= gfc_finish_block (&cond_block
);
3192 tem
= gfc_conv_descriptor_data_get (decl
);
3193 tem
= fold_convert (pvoid_type_node
, tem
);
3194 cond
= fold_build2_loc (input_location
, NE_EXPR
,
3196 tem
, null_pointer_node
);
3198 cond
= fold_build2_loc (input_location
,
3202 gfc_add_expr_to_block (block
,
3203 build3_loc (input_location
,
3208 OMP_CLAUSE_SIZE (node
) = size
;
3210 else if (n
->sym
->attr
.dimension
)
3212 stmtblock_t cond_block
;
3213 gfc_init_block (&cond_block
);
3214 tree size
= gfc_full_array_size (&cond_block
, decl
,
3215 GFC_TYPE_ARRAY_RANK (type
));
3218 tree var
= gfc_create_var (gfc_array_index_type
,
3220 gfc_add_modify (&cond_block
, var
, size
);
3221 tree cond_body
= gfc_finish_block (&cond_block
);
3222 tree cond
= build3_loc (input_location
, COND_EXPR
,
3223 void_type_node
, present
,
3224 cond_body
, NULL_TREE
);
3225 gfc_add_expr_to_block (block
, cond
);
3226 OMP_CLAUSE_SIZE (node
) = var
;
3230 gfc_add_block_to_block (block
, &cond_block
);
3231 OMP_CLAUSE_SIZE (node
) = size
;
3234 if (n
->sym
->attr
.dimension
)
3237 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3238 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3239 OMP_CLAUSE_SIZE (node
)
3240 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3241 OMP_CLAUSE_SIZE (node
), elemsz
);
3245 && TREE_CODE (decl
) == INDIRECT_REF
3246 && (TREE_CODE (TREE_OPERAND (decl
, 0))
3249 /* A single indirectref is handled by the middle end. */
3250 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
3251 decl
= TREE_OPERAND (decl
, 0);
3252 decl
= gfc_build_cond_assign_expr (block
, present
, decl
,
3254 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (decl
);
3257 OMP_CLAUSE_DECL (node
) = decl
;
3260 && n
->expr
->expr_type
== EXPR_VARIABLE
3261 && n
->expr
->ref
->type
== REF_ARRAY
3262 && !n
->expr
->ref
->next
)
3264 /* An array element or array section which is not part of a
3265 derived type, etc. */
3266 bool element
= n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
;
3267 gfc_trans_omp_array_section (block
, n
, decl
, element
,
3268 GOMP_MAP_POINTER
, node
, node2
,
3272 && n
->expr
->expr_type
== EXPR_VARIABLE
3273 && (n
->expr
->ref
->type
== REF_COMPONENT
3274 || n
->expr
->ref
->type
== REF_ARRAY
)
3276 && lastref
->type
== REF_COMPONENT
3277 && lastref
->u
.c
.component
->ts
.type
!= BT_CLASS
3278 && lastref
->u
.c
.component
->ts
.type
!= BT_DERIVED
3279 && !lastref
->u
.c
.component
->attr
.dimension
)
3281 /* Derived type access with last component being a scalar. */
3282 gfc_init_se (&se
, NULL
);
3284 gfc_conv_expr (&se
, n
->expr
);
3285 gfc_add_block_to_block (block
, &se
.pre
);
3286 /* For BT_CHARACTER a pointer is returned. */
3287 OMP_CLAUSE_DECL (node
)
3288 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3289 ? build_fold_indirect_ref (se
.expr
) : se
.expr
;
3290 gfc_add_block_to_block (block
, &se
.post
);
3291 if (pointer
|| allocatable
)
3293 node2
= build_omp_clause (input_location
,
3296 = (openacc
? GOMP_MAP_ATTACH_DETACH
3297 : GOMP_MAP_ALWAYS_POINTER
);
3298 OMP_CLAUSE_SET_MAP_KIND (node2
, kind
);
3299 OMP_CLAUSE_DECL (node2
)
3300 = POINTER_TYPE_P (TREE_TYPE (se
.expr
))
3302 : gfc_build_addr_expr (NULL
, se
.expr
);
3303 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3305 && n
->expr
->ts
.type
== BT_CHARACTER
3306 && n
->expr
->ts
.deferred
)
3308 gcc_assert (se
.string_length
);
3310 = gfc_get_char_type (n
->expr
->ts
.kind
);
3311 OMP_CLAUSE_SIZE (node
)
3312 = fold_build2 (MULT_EXPR
, size_type_node
,
3313 fold_convert (size_type_node
,
3315 TYPE_SIZE_UNIT (tmp
));
3316 node3
= build_omp_clause (input_location
,
3318 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_TO
);
3319 OMP_CLAUSE_DECL (node3
) = se
.string_length
;
3320 OMP_CLAUSE_SIZE (node3
)
3321 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3326 && n
->expr
->expr_type
== EXPR_VARIABLE
3327 && (n
->expr
->ref
->type
== REF_COMPONENT
3328 || n
->expr
->ref
->type
== REF_ARRAY
))
3330 gfc_init_se (&se
, NULL
);
3331 se
.expr
= gfc_maybe_dereference_var (n
->sym
, decl
);
3333 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
3335 if (ref
->type
== REF_COMPONENT
)
3337 if (ref
->u
.c
.sym
->attr
.extension
)
3338 conv_parent_component_references (&se
, ref
);
3340 gfc_conv_component_ref (&se
, ref
);
3342 else if (ref
->type
== REF_ARRAY
)
3344 if (ref
->u
.ar
.type
== AR_ELEMENT
&& ref
->next
)
3345 gfc_conv_array_ref (&se
, &ref
->u
.ar
, n
->expr
,
3348 gcc_assert (!ref
->next
);
3351 sorry ("unhandled expression type");
3354 tree inner
= se
.expr
;
3356 /* Last component is a derived type or class pointer. */
3357 if (lastref
->type
== REF_COMPONENT
3358 && (lastref
->u
.c
.component
->ts
.type
== BT_DERIVED
3359 || lastref
->u
.c
.component
->ts
.type
== BT_CLASS
))
3361 if (pointer
|| (openacc
&& allocatable
))
3365 if (lastref
->u
.c
.component
->ts
.type
== BT_CLASS
)
3367 data
= gfc_class_data_get (inner
);
3368 gcc_assert (POINTER_TYPE_P (TREE_TYPE (data
)));
3369 data
= build_fold_indirect_ref (data
);
3370 size
= gfc_class_vtab_size_get (inner
);
3372 else /* BT_DERIVED. */
3375 size
= TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3378 OMP_CLAUSE_DECL (node
) = data
;
3379 OMP_CLAUSE_SIZE (node
) = size
;
3380 node2
= build_omp_clause (input_location
,
3382 OMP_CLAUSE_SET_MAP_KIND (node2
,
3384 ? GOMP_MAP_ATTACH_DETACH
3385 : GOMP_MAP_ALWAYS_POINTER
);
3386 OMP_CLAUSE_DECL (node2
) = build_fold_addr_expr (data
);
3387 OMP_CLAUSE_SIZE (node2
) = size_int (0);
3391 OMP_CLAUSE_DECL (node
) = inner
;
3392 OMP_CLAUSE_SIZE (node
)
3393 = TYPE_SIZE_UNIT (TREE_TYPE (inner
));
3396 else if (lastref
->type
== REF_ARRAY
3397 && lastref
->u
.ar
.type
== AR_FULL
)
3399 /* Just pass the (auto-dereferenced) decl through for
3400 bare attach and detach clauses. */
3401 if (n
->u
.map_op
== OMP_MAP_ATTACH
3402 || n
->u
.map_op
== OMP_MAP_DETACH
)
3404 OMP_CLAUSE_DECL (node
) = inner
;
3405 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
3406 goto finalize_map_clause
;
3409 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
)))
3411 gomp_map_kind map_kind
;
3413 tree type
= TREE_TYPE (inner
);
3414 tree ptr
= gfc_conv_descriptor_data_get (inner
);
3415 ptr
= build_fold_indirect_ref (ptr
);
3416 OMP_CLAUSE_DECL (node
) = ptr
;
3417 int rank
= GFC_TYPE_ARRAY_RANK (type
);
3418 OMP_CLAUSE_SIZE (node
)
3419 = gfc_full_array_size (block
, inner
, rank
);
3421 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3422 if (GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (node
)))
3423 map_kind
= GOMP_MAP_TO
;
3424 else if (n
->u
.map_op
== OMP_MAP_RELEASE
3425 || n
->u
.map_op
== OMP_MAP_DELETE
)
3426 map_kind
= OMP_CLAUSE_MAP_KIND (node
);
3428 map_kind
= GOMP_MAP_ALLOC
;
3430 && n
->expr
->ts
.type
== BT_CHARACTER
3431 && n
->expr
->ts
.deferred
)
3433 gcc_assert (se
.string_length
);
3434 tree len
= fold_convert (size_type_node
,
3436 elemsz
= gfc_get_char_type (n
->expr
->ts
.kind
);
3437 elemsz
= TYPE_SIZE_UNIT (elemsz
);
3438 elemsz
= fold_build2 (MULT_EXPR
, size_type_node
,
3440 node4
= build_omp_clause (input_location
,
3442 OMP_CLAUSE_SET_MAP_KIND (node4
, map_kind
);
3443 OMP_CLAUSE_DECL (node4
) = se
.string_length
;
3444 OMP_CLAUSE_SIZE (node4
)
3445 = TYPE_SIZE_UNIT (gfc_charlen_type_node
);
3447 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3448 OMP_CLAUSE_SIZE (node
)
3449 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3450 OMP_CLAUSE_SIZE (node
), elemsz
);
3451 desc_node
= build_omp_clause (input_location
,
3454 OMP_CLAUSE_SET_MAP_KIND (desc_node
,
3457 OMP_CLAUSE_SET_MAP_KIND (desc_node
, map_kind
);
3458 OMP_CLAUSE_DECL (desc_node
) = inner
;
3459 OMP_CLAUSE_SIZE (desc_node
) = TYPE_SIZE_UNIT (type
);
3465 node
= desc_node
; /* Put first. */
3467 node3
= build_omp_clause (input_location
,
3469 OMP_CLAUSE_SET_MAP_KIND (node3
,
3471 ? GOMP_MAP_ATTACH_DETACH
3472 : GOMP_MAP_ALWAYS_POINTER
);
3473 OMP_CLAUSE_DECL (node3
)
3474 = gfc_conv_descriptor_data_get (inner
);
3475 /* Similar to gfc_trans_omp_array_section (details
3476 there), we add/keep the cast for OpenMP to prevent
3477 that an 'alloc:' gets added for node3 ('desc.data')
3478 as that is part of the whole descriptor (node3).
3479 TODO: Remove once the ME handles this properly. */
3481 OMP_CLAUSE_DECL (node3
)
3482 = fold_convert (TREE_TYPE (TREE_OPERAND(ptr
, 0)),
3483 OMP_CLAUSE_DECL (node3
));
3485 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
3486 OMP_CLAUSE_SIZE (node3
) = size_int (0);
3489 OMP_CLAUSE_DECL (node
) = inner
;
3491 else if (lastref
->type
== REF_ARRAY
)
3493 /* An array element or section. */
3494 bool element
= lastref
->u
.ar
.type
== AR_ELEMENT
;
3495 gomp_map_kind kind
= (openacc
? GOMP_MAP_ATTACH_DETACH
3496 : GOMP_MAP_ALWAYS_POINTER
);
3497 gfc_trans_omp_array_section (block
, n
, inner
, element
,
3498 kind
, node
, node2
, node3
,
3505 sorry ("unhandled expression");
3507 finalize_map_clause
:
3509 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3511 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
3513 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
3515 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
3520 case OMP_LIST_CACHE
:
3521 for (; n
!= NULL
; n
= n
->next
)
3523 if (!n
->sym
->attr
.referenced
)
3529 clause_code
= OMP_CLAUSE_TO
;
3532 clause_code
= OMP_CLAUSE_FROM
;
3534 case OMP_LIST_CACHE
:
3535 clause_code
= OMP_CLAUSE__CACHE_
;
3540 tree node
= build_omp_clause (input_location
, clause_code
);
3541 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
3543 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
3544 if (gfc_omp_privatize_by_reference (decl
))
3546 if (gfc_omp_is_allocatable_or_ptr (decl
))
3547 decl
= build_fold_indirect_ref (decl
);
3548 decl
= build_fold_indirect_ref (decl
);
3550 else if (DECL_P (decl
))
3551 TREE_ADDRESSABLE (decl
) = 1;
3552 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
3554 tree type
= TREE_TYPE (decl
);
3555 tree ptr
= gfc_conv_descriptor_data_get (decl
);
3556 ptr
= fold_convert (build_pointer_type (char_type_node
),
3558 ptr
= build_fold_indirect_ref (ptr
);
3559 OMP_CLAUSE_DECL (node
) = ptr
;
3560 OMP_CLAUSE_SIZE (node
)
3561 = gfc_full_array_size (block
, decl
,
3562 GFC_TYPE_ARRAY_RANK (type
));
3564 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3565 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3566 OMP_CLAUSE_SIZE (node
)
3567 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3568 OMP_CLAUSE_SIZE (node
), elemsz
);
3572 OMP_CLAUSE_DECL (node
) = decl
;
3573 if (gfc_omp_is_allocatable_or_ptr (decl
))
3574 OMP_CLAUSE_SIZE (node
)
3575 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
3581 gfc_init_se (&se
, NULL
);
3582 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
3584 gfc_conv_expr_reference (&se
, n
->expr
);
3586 gfc_add_block_to_block (block
, &se
.pre
);
3587 OMP_CLAUSE_SIZE (node
)
3588 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
3592 gfc_conv_expr_descriptor (&se
, n
->expr
);
3593 ptr
= gfc_conv_array_data (se
.expr
);
3594 tree type
= TREE_TYPE (se
.expr
);
3595 gfc_add_block_to_block (block
, &se
.pre
);
3596 OMP_CLAUSE_SIZE (node
)
3597 = gfc_full_array_size (block
, se
.expr
,
3598 GFC_TYPE_ARRAY_RANK (type
));
3600 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
3601 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
3602 OMP_CLAUSE_SIZE (node
)
3603 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
3604 OMP_CLAUSE_SIZE (node
), elemsz
);
3606 gfc_add_block_to_block (block
, &se
.post
);
3607 ptr
= fold_convert (build_pointer_type (char_type_node
),
3609 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
3611 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
3619 if (clauses
->if_expr
)
3623 gfc_init_se (&se
, NULL
);
3624 gfc_conv_expr (&se
, clauses
->if_expr
);
3625 gfc_add_block_to_block (block
, &se
.pre
);
3626 if_var
= gfc_evaluate_now (se
.expr
, block
);
3627 gfc_add_block_to_block (block
, &se
.post
);
3629 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
3630 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
3631 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
3632 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3634 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
3635 if (clauses
->if_exprs
[ifc
])
3639 gfc_init_se (&se
, NULL
);
3640 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
3641 gfc_add_block_to_block (block
, &se
.pre
);
3642 if_var
= gfc_evaluate_now (se
.expr
, block
);
3643 gfc_add_block_to_block (block
, &se
.post
);
3645 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
3649 OMP_CLAUSE_IF_MODIFIER (c
) = VOID_CST
;
3651 case OMP_IF_PARALLEL
:
3652 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
3655 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_SIMD
;
3658 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
3660 case OMP_IF_TASKLOOP
:
3661 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
3664 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
3666 case OMP_IF_TARGET_DATA
:
3667 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
3669 case OMP_IF_TARGET_UPDATE
:
3670 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
3672 case OMP_IF_TARGET_ENTER_DATA
:
3673 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
3675 case OMP_IF_TARGET_EXIT_DATA
:
3676 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
3681 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
3682 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3685 if (clauses
->final_expr
)
3689 gfc_init_se (&se
, NULL
);
3690 gfc_conv_expr (&se
, clauses
->final_expr
);
3691 gfc_add_block_to_block (block
, &se
.pre
);
3692 final_var
= gfc_evaluate_now (se
.expr
, block
);
3693 gfc_add_block_to_block (block
, &se
.post
);
3695 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINAL
);
3696 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
3697 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3700 if (clauses
->num_threads
)
3704 gfc_init_se (&se
, NULL
);
3705 gfc_conv_expr (&se
, clauses
->num_threads
);
3706 gfc_add_block_to_block (block
, &se
.pre
);
3707 num_threads
= gfc_evaluate_now (se
.expr
, block
);
3708 gfc_add_block_to_block (block
, &se
.post
);
3710 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_THREADS
);
3711 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
3712 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3715 chunk_size
= NULL_TREE
;
3716 if (clauses
->chunk_size
)
3718 gfc_init_se (&se
, NULL
);
3719 gfc_conv_expr (&se
, clauses
->chunk_size
);
3720 gfc_add_block_to_block (block
, &se
.pre
);
3721 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
3722 gfc_add_block_to_block (block
, &se
.post
);
3725 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
3727 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SCHEDULE
);
3728 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
3729 switch (clauses
->sched_kind
)
3731 case OMP_SCHED_STATIC
:
3732 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
3734 case OMP_SCHED_DYNAMIC
:
3735 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
3737 case OMP_SCHED_GUIDED
:
3738 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
3740 case OMP_SCHED_RUNTIME
:
3741 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
3743 case OMP_SCHED_AUTO
:
3744 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
3749 if (clauses
->sched_monotonic
)
3750 OMP_CLAUSE_SCHEDULE_KIND (c
)
3751 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
3752 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
3753 else if (clauses
->sched_nonmonotonic
)
3754 OMP_CLAUSE_SCHEDULE_KIND (c
)
3755 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
3756 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
3757 if (clauses
->sched_simd
)
3758 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
3759 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3762 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
3764 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULT
);
3765 switch (clauses
->default_sharing
)
3767 case OMP_DEFAULT_NONE
:
3768 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
3770 case OMP_DEFAULT_SHARED
:
3771 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
3773 case OMP_DEFAULT_PRIVATE
:
3774 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
3776 case OMP_DEFAULT_FIRSTPRIVATE
:
3777 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
3779 case OMP_DEFAULT_PRESENT
:
3780 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
3785 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3788 if (clauses
->nowait
)
3790 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOWAIT
);
3791 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3794 if (clauses
->ordered
)
3796 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDERED
);
3797 OMP_CLAUSE_ORDERED_EXPR (c
)
3798 = clauses
->orderedc
? build_int_cst (integer_type_node
,
3799 clauses
->orderedc
) : NULL_TREE
;
3800 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3803 if (clauses
->order_concurrent
)
3805 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDER
);
3806 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3809 if (clauses
->untied
)
3811 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_UNTIED
);
3812 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3815 if (clauses
->mergeable
)
3817 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_MERGEABLE
);
3818 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3821 if (clauses
->collapse
)
3823 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_COLLAPSE
);
3824 OMP_CLAUSE_COLLAPSE_EXPR (c
)
3825 = build_int_cst (integer_type_node
, clauses
->collapse
);
3826 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3829 if (clauses
->inbranch
)
3831 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INBRANCH
);
3832 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3835 if (clauses
->notinbranch
)
3837 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOTINBRANCH
);
3838 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3841 switch (clauses
->cancel
)
3843 case OMP_CANCEL_UNKNOWN
:
3845 case OMP_CANCEL_PARALLEL
:
3846 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PARALLEL
);
3847 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3849 case OMP_CANCEL_SECTIONS
:
3850 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SECTIONS
);
3851 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3854 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FOR
);
3855 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3857 case OMP_CANCEL_TASKGROUP
:
3858 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TASKGROUP
);
3859 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3863 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
3865 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PROC_BIND
);
3866 switch (clauses
->proc_bind
)
3868 case OMP_PROC_BIND_PRIMARY
:
3869 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_PRIMARY
;
3871 case OMP_PROC_BIND_MASTER
:
3872 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
3874 case OMP_PROC_BIND_SPREAD
:
3875 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
3877 case OMP_PROC_BIND_CLOSE
:
3878 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
3883 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3886 if (clauses
->safelen_expr
)
3890 gfc_init_se (&se
, NULL
);
3891 gfc_conv_expr (&se
, clauses
->safelen_expr
);
3892 gfc_add_block_to_block (block
, &se
.pre
);
3893 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
3894 gfc_add_block_to_block (block
, &se
.post
);
3896 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SAFELEN
);
3897 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
3898 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3901 if (clauses
->simdlen_expr
)
3905 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
3906 OMP_CLAUSE_SIMDLEN_EXPR (c
)
3907 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
3908 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3914 gfc_init_se (&se
, NULL
);
3915 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
3916 gfc_add_block_to_block (block
, &se
.pre
);
3917 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
3918 gfc_add_block_to_block (block
, &se
.post
);
3920 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
3921 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
3922 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3926 if (clauses
->num_teams
)
3930 gfc_init_se (&se
, NULL
);
3931 gfc_conv_expr (&se
, clauses
->num_teams
);
3932 gfc_add_block_to_block (block
, &se
.pre
);
3933 num_teams
= gfc_evaluate_now (se
.expr
, block
);
3934 gfc_add_block_to_block (block
, &se
.post
);
3936 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TEAMS
);
3937 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
3938 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3941 if (clauses
->device
)
3945 gfc_init_se (&se
, NULL
);
3946 gfc_conv_expr (&se
, clauses
->device
);
3947 gfc_add_block_to_block (block
, &se
.pre
);
3948 device
= gfc_evaluate_now (se
.expr
, block
);
3949 gfc_add_block_to_block (block
, &se
.post
);
3951 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEVICE
);
3952 OMP_CLAUSE_DEVICE_ID (c
) = device
;
3953 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3956 if (clauses
->thread_limit
)
3960 gfc_init_se (&se
, NULL
);
3961 gfc_conv_expr (&se
, clauses
->thread_limit
);
3962 gfc_add_block_to_block (block
, &se
.pre
);
3963 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
3964 gfc_add_block_to_block (block
, &se
.post
);
3966 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREAD_LIMIT
);
3967 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
3968 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3971 chunk_size
= NULL_TREE
;
3972 if (clauses
->dist_chunk_size
)
3974 gfc_init_se (&se
, NULL
);
3975 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
3976 gfc_add_block_to_block (block
, &se
.pre
);
3977 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
3978 gfc_add_block_to_block (block
, &se
.post
);
3981 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
3983 c
= build_omp_clause (gfc_get_location (&where
),
3984 OMP_CLAUSE_DIST_SCHEDULE
);
3985 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
3986 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3989 if (clauses
->grainsize
)
3993 gfc_init_se (&se
, NULL
);
3994 gfc_conv_expr (&se
, clauses
->grainsize
);
3995 gfc_add_block_to_block (block
, &se
.pre
);
3996 grainsize
= gfc_evaluate_now (se
.expr
, block
);
3997 gfc_add_block_to_block (block
, &se
.post
);
3999 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GRAINSIZE
);
4000 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
4001 if (clauses
->grainsize_strict
)
4002 OMP_CLAUSE_GRAINSIZE_STRICT (c
) = 1;
4003 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4006 if (clauses
->num_tasks
)
4010 gfc_init_se (&se
, NULL
);
4011 gfc_conv_expr (&se
, clauses
->num_tasks
);
4012 gfc_add_block_to_block (block
, &se
.pre
);
4013 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
4014 gfc_add_block_to_block (block
, &se
.post
);
4016 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TASKS
);
4017 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
4018 if (clauses
->num_tasks_strict
)
4019 OMP_CLAUSE_NUM_TASKS_STRICT (c
) = 1;
4020 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4023 if (clauses
->priority
)
4027 gfc_init_se (&se
, NULL
);
4028 gfc_conv_expr (&se
, clauses
->priority
);
4029 gfc_add_block_to_block (block
, &se
.pre
);
4030 priority
= gfc_evaluate_now (se
.expr
, block
);
4031 gfc_add_block_to_block (block
, &se
.post
);
4033 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PRIORITY
);
4034 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
4035 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4038 if (clauses
->detach
)
4042 gfc_init_se (&se
, NULL
);
4043 gfc_conv_expr (&se
, clauses
->detach
);
4044 gfc_add_block_to_block (block
, &se
.pre
);
4046 gfc_add_block_to_block (block
, &se
.post
);
4048 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DETACH
);
4049 TREE_ADDRESSABLE (detach
) = 1;
4050 OMP_CLAUSE_DECL (c
) = detach
;
4051 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4054 if (clauses
->filter
)
4058 gfc_init_se (&se
, NULL
);
4059 gfc_conv_expr (&se
, clauses
->filter
);
4060 gfc_add_block_to_block (block
, &se
.pre
);
4061 filter
= gfc_evaluate_now (se
.expr
, block
);
4062 gfc_add_block_to_block (block
, &se
.post
);
4064 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FILTER
);
4065 OMP_CLAUSE_FILTER_EXPR (c
) = filter
;
4066 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4073 gfc_init_se (&se
, NULL
);
4074 gfc_conv_expr (&se
, clauses
->hint
);
4075 gfc_add_block_to_block (block
, &se
.pre
);
4076 hint
= gfc_evaluate_now (se
.expr
, block
);
4077 gfc_add_block_to_block (block
, &se
.post
);
4079 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_HINT
);
4080 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
4081 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4086 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMD
);
4087 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4089 if (clauses
->threads
)
4091 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREADS
);
4092 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4094 if (clauses
->nogroup
)
4096 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOGROUP
);
4097 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4100 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
4102 if (clauses
->defaultmap
[i
] == OMP_DEFAULTMAP_UNSET
)
4104 enum omp_clause_defaultmap_kind behavior
, category
;
4105 switch ((gfc_omp_defaultmap_category
) i
)
4107 case OMP_DEFAULTMAP_CAT_UNCATEGORIZED
:
4108 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
4110 case OMP_DEFAULTMAP_CAT_SCALAR
:
4111 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
;
4113 case OMP_DEFAULTMAP_CAT_AGGREGATE
:
4114 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
;
4116 case OMP_DEFAULTMAP_CAT_ALLOCATABLE
:
4117 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
;
4119 case OMP_DEFAULTMAP_CAT_POINTER
:
4120 category
= OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
;
4122 default: gcc_unreachable ();
4124 switch (clauses
->defaultmap
[i
])
4126 case OMP_DEFAULTMAP_ALLOC
:
4127 behavior
= OMP_CLAUSE_DEFAULTMAP_ALLOC
;
4129 case OMP_DEFAULTMAP_TO
: behavior
= OMP_CLAUSE_DEFAULTMAP_TO
; break;
4130 case OMP_DEFAULTMAP_FROM
: behavior
= OMP_CLAUSE_DEFAULTMAP_FROM
; break;
4131 case OMP_DEFAULTMAP_TOFROM
:
4132 behavior
= OMP_CLAUSE_DEFAULTMAP_TOFROM
;
4134 case OMP_DEFAULTMAP_FIRSTPRIVATE
:
4135 behavior
= OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
4137 case OMP_DEFAULTMAP_NONE
: behavior
= OMP_CLAUSE_DEFAULTMAP_NONE
; break;
4138 case OMP_DEFAULTMAP_DEFAULT
:
4139 behavior
= OMP_CLAUSE_DEFAULTMAP_DEFAULT
;
4141 default: gcc_unreachable ();
4143 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULTMAP
);
4144 OMP_CLAUSE_DEFAULTMAP_SET_KIND (c
, behavior
, category
);
4145 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4148 if (clauses
->depend_source
)
4150 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEPEND
);
4151 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
4152 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4157 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ASYNC
);
4158 if (clauses
->async_expr
)
4159 OMP_CLAUSE_ASYNC_EXPR (c
)
4160 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
4162 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
4163 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4167 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SEQ
);
4168 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4170 if (clauses
->par_auto
)
4172 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_AUTO
);
4173 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4175 if (clauses
->if_present
)
4177 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF_PRESENT
);
4178 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4180 if (clauses
->finalize
)
4182 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINALIZE
);
4183 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4185 if (clauses
->independent
)
4187 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INDEPENDENT
);
4188 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4190 if (clauses
->wait_list
)
4194 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4196 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WAIT
);
4197 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
4198 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
4202 if (clauses
->num_gangs_expr
)
4205 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
4206 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_GANGS
);
4207 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
4208 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4210 if (clauses
->num_workers_expr
)
4212 tree num_workers_var
4213 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
4214 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_WORKERS
);
4215 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
4216 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4218 if (clauses
->vector_length_expr
)
4220 tree vector_length_var
4221 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
4222 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR_LENGTH
);
4223 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
4224 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4226 if (clauses
->tile_list
)
4228 vec
<tree
, va_gc
> *tvec
;
4231 vec_alloc (tvec
, 4);
4233 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
4234 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
4236 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TILE
);
4237 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
4238 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4241 if (clauses
->vector
)
4243 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR
);
4244 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4246 if (clauses
->vector_expr
)
4249 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
4250 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
4252 /* TODO: We're not capturing location information for individual
4253 clauses. However, if we have an expression attached to the
4254 clause, that one provides better location information. */
4255 OMP_CLAUSE_LOCATION (c
)
4256 = gfc_get_location (&clauses
->vector_expr
->where
);
4259 if (clauses
->worker
)
4261 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WORKER
);
4262 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4264 if (clauses
->worker_expr
)
4267 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
4268 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
4270 /* TODO: We're not capturing location information for individual
4271 clauses. However, if we have an expression attached to the
4272 clause, that one provides better location information. */
4273 OMP_CLAUSE_LOCATION (c
)
4274 = gfc_get_location (&clauses
->worker_expr
->where
);
4280 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GANG
);
4281 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4283 if (clauses
->gang_num_expr
)
4285 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
4286 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
4288 /* TODO: We're not capturing location information for individual
4289 clauses. However, if we have an expression attached to the
4290 clause, that one provides better location information. */
4291 OMP_CLAUSE_LOCATION (c
)
4292 = gfc_get_location (&clauses
->gang_num_expr
->where
);
4295 if (clauses
->gang_static
)
4297 arg
= clauses
->gang_static_expr
4298 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
4299 : integer_minus_one_node
;
4300 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
4303 if (clauses
->bind
!= OMP_BIND_UNSET
)
4305 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_BIND
);
4306 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
4307 switch (clauses
->bind
)
4309 case OMP_BIND_TEAMS
:
4310 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_TEAMS
;
4312 case OMP_BIND_PARALLEL
:
4313 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_PARALLEL
;
4315 case OMP_BIND_THREAD
:
4316 OMP_CLAUSE_BIND_KIND (c
) = OMP_CLAUSE_BIND_THREAD
;
4322 /* OpenACC 'nohost' clauses cannot appear here. */
4323 gcc_checking_assert (!clauses
->nohost
);
4325 return nreverse (omp_clauses
);
/* NOTE(review): this chunk is a damaged line-wrapped extraction of GCC's
   fortran/trans-openmp.c.  The leading numerals ("4328", "4331", ...) are
   the original file's line numbers fused into the text, and several
   original lines are missing entirely (gaps 4329-4330, 4332-4335, 4338,
   4340, 4343-4351) -- presumably the return type, local declarations, the
   pushlevel () call and the else/poplevel arms; confirm against the real
   source before acting on this text.  Code bytes are left untouched;
   only comments are added.  */
4328 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
/* Translate the gfc_code chain CODE to a GENERIC statement tree.
   FORCE_EMPTY requests the BIND_EXPR wrapper even when the translated
   body is an empty statement.  */
4331 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
/* Translate the statement list first.  */
4336 stmt
= gfc_trans_code (code
);
/* If the result is not already a BIND_EXPR, wrap it in one so the
   directive body gets its own binding contour.  */
4337 if (TREE_CODE (stmt
) != BIND_EXPR
)
4339 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
/* Pop the current binding level and attach its BLOCK to the new
   BIND_EXPR.  */
4341 tree block
= poplevel (1, 0);
4342 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
4352 /* Translate OpenACC 'parallel', 'kernels', 'serial', 'data', 'host_data'
/* NOTE(review): damaged extraction -- original lines 4353-4355, 4357-4358,
   4361-4363, 4366, 4369, 4372, 4375, 4378-4382 are missing (likely the
   rest of the comment, the return type, 'gfc_code op' switch header,
   'break;' statements and a gcc_unreachable () default).  Only comments
   are added here; code bytes are untouched.  */
/* Translate an OpenACC structured construct: build the matching
   OACC_* tree around the translated body and clauses.  */
4356 gfc_trans_oacc_construct (gfc_code
*code
)
4359 tree stmt
, oacc_clauses
;
4360 enum tree_code construct_code
;
/* Map the gfortran EXEC_OACC_* opcode to the GENERIC tree code.  */
4364 case EXEC_OACC_PARALLEL
:
4365 construct_code
= OACC_PARALLEL
;
4367 case EXEC_OACC_KERNELS
:
4368 construct_code
= OACC_KERNELS
;
4370 case EXEC_OACC_SERIAL
:
4371 construct_code
= OACC_SERIAL
;
4373 case EXEC_OACC_DATA
:
4374 construct_code
= OACC_DATA
;
4376 case EXEC_OACC_HOST_DATA
:
4377 construct_code
= OACC_HOST_DATA
;
/* Translate the clause list into the enclosing block, then the body
   (forcing a BIND_EXPR), and combine them into the construct tree.  */
4383 gfc_start_block (&block
);
4384 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4385 code
->loc
, false, true);
4386 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
/* Two-operand construct node: body and clauses, at the directive's
   source location.  */
4387 stmt
= build2_loc (gfc_get_location (&code
->loc
), construct_code
,
4388 void_type_node
, stmt
, oacc_clauses
);
4389 gfc_add_expr_to_block (&block
, stmt
);
4390 return gfc_finish_block (&block
);
4393 /* update, enter_data, exit_data, cache. */
/* NOTE(review): damaged extraction -- original lines 4394, 4396-4397,
   4400-4402, 4405, 4408, 4411, 4414-4418, 4423 are missing (likely the
   return type, switch header, 'break;'s, a gcc_unreachable () default and
   the clause operand of build1_loc).  Only comments added; code bytes
   untouched.  */
/* Translate a standalone (executable) OpenACC directive into a
   one-operand OACC_* tree carrying its clause list.  */
4395 gfc_trans_oacc_executable_directive (gfc_code
*code
)
4398 tree stmt
, oacc_clauses
;
4399 enum tree_code construct_code
;
/* Map the gfortran EXEC_OACC_* opcode to the GENERIC tree code.  */
4403 case EXEC_OACC_UPDATE
:
4404 construct_code
= OACC_UPDATE
;
4406 case EXEC_OACC_ENTER_DATA
:
4407 construct_code
= OACC_ENTER_DATA
;
4409 case EXEC_OACC_EXIT_DATA
:
4410 construct_code
= OACC_EXIT_DATA
;
4412 case EXEC_OACC_CACHE
:
4413 construct_code
= OACC_CACHE
;
/* Translate the clauses into the block, then build the directive node
   (its single operand -- the clauses -- is on a missing line).  */
4419 gfc_start_block (&block
);
4420 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4421 code
->loc
, false, true);
4422 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
4424 gfc_add_expr_to_block (&block
, stmt
);
4425 return gfc_finish_block (&block
);
/* NOTE(review): damaged extraction -- original lines 4430-4432, 4434-4435,
   4438, 4440-4441, 4444, 4446, 4449, 4451, 4454, 4457, 4460-4462 are
   missing (likely the return type, declarations of stmt/t/el/nparms, the
   nparms++ loop body and blank lines).  Only comments added; code bytes
   untouched.  */
/* Translate an OpenACC WAIT directive into a call to the
   BUILT_IN_GOACC_WAIT builtin: (async, num-waits, wait-args...).  */
4429 gfc_trans_oacc_wait_directive (gfc_code
*code
)
4433 vec
<tree
, va_gc
> *args
;
4436 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
4437 location_t loc
= input_location
;
/* First pass over the wait list -- presumably counts entries into
   nparms (loop body is on a missing line); verify against the real
   source.  */
4439 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
/* Room for the async argument and the count, plus one slot per wait
   expression.  */
4442 vec_alloc (args
, nparms
+ 2);
4443 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
4445 gfc_start_block (&block
);
/* Async argument: the clause expression if present, else -2
   (the "synchronous" sentinel used by the GOACC_wait ABI -- confirm).  */
4447 if (clauses
->async_expr
)
4448 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
4450 t
= build_int_cst (integer_type_node
, -2);
4452 args
->quick_push (t
);
4453 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
/* Second pass: push each wait-list expression as a call argument.  */
4455 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
4456 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
4458 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
4459 gfc_add_expr_to_block (&block
, stmt
);
4463 return gfc_finish_block (&block
);
/* Forward declarations so the translators defined later in this file can
   be referenced before their definitions.  */
static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
4470 gfc_trans_omp_atomic (gfc_code
*code
)
4472 gfc_code
*atomic_code
= code
->block
;
4476 gfc_expr
*expr2
, *e
;
4479 tree lhsaddr
, type
, rhs
, x
;
4480 enum tree_code op
= ERROR_MARK
;
4481 enum tree_code aop
= OMP_ATOMIC
;
4482 bool var_on_left
= false;
4483 enum omp_memory_order mo
;
4484 switch (atomic_code
->ext
.omp_clauses
->memorder
)
4486 case OMP_MEMORDER_UNSET
: mo
= OMP_MEMORY_ORDER_UNSPECIFIED
; break;
4487 case OMP_MEMORDER_ACQ_REL
: mo
= OMP_MEMORY_ORDER_ACQ_REL
; break;
4488 case OMP_MEMORDER_ACQUIRE
: mo
= OMP_MEMORY_ORDER_ACQUIRE
; break;
4489 case OMP_MEMORDER_RELAXED
: mo
= OMP_MEMORY_ORDER_RELAXED
; break;
4490 case OMP_MEMORDER_RELEASE
: mo
= OMP_MEMORY_ORDER_RELEASE
; break;
4491 case OMP_MEMORDER_SEQ_CST
: mo
= OMP_MEMORY_ORDER_SEQ_CST
; break;
4492 default: gcc_unreachable ();
4495 code
= code
->block
->next
;
4496 gcc_assert (code
->op
== EXEC_ASSIGN
);
4497 var
= code
->expr1
->symtree
->n
.sym
;
4499 gfc_init_se (&lse
, NULL
);
4500 gfc_init_se (&rse
, NULL
);
4501 gfc_init_se (&vse
, NULL
);
4502 gfc_start_block (&block
);
4504 expr2
= code
->expr2
;
4505 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4506 != GFC_OMP_ATOMIC_WRITE
)
4507 && expr2
->expr_type
== EXPR_FUNCTION
4508 && expr2
->value
.function
.isym
4509 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4510 expr2
= expr2
->value
.function
.actual
->expr
;
4512 if ((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4513 == GFC_OMP_ATOMIC_READ
)
4515 gfc_conv_expr (&vse
, code
->expr1
);
4516 gfc_add_block_to_block (&block
, &vse
.pre
);
4518 gfc_conv_expr (&lse
, expr2
);
4519 gfc_add_block_to_block (&block
, &lse
.pre
);
4520 type
= TREE_TYPE (lse
.expr
);
4521 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
4523 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
4524 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
4525 x
= convert (TREE_TYPE (vse
.expr
), x
);
4526 gfc_add_modify (&block
, vse
.expr
, x
);
4528 gfc_add_block_to_block (&block
, &lse
.pre
);
4529 gfc_add_block_to_block (&block
, &rse
.pre
);
4531 return gfc_finish_block (&block
);
4533 if (atomic_code
->ext
.omp_clauses
->capture
)
4535 aop
= OMP_ATOMIC_CAPTURE_NEW
;
4536 if (expr2
->expr_type
== EXPR_VARIABLE
)
4538 aop
= OMP_ATOMIC_CAPTURE_OLD
;
4539 gfc_conv_expr (&vse
, code
->expr1
);
4540 gfc_add_block_to_block (&block
, &vse
.pre
);
4542 gfc_conv_expr (&lse
, expr2
);
4543 gfc_add_block_to_block (&block
, &lse
.pre
);
4544 gfc_init_se (&lse
, NULL
);
4546 var
= code
->expr1
->symtree
->n
.sym
;
4547 expr2
= code
->expr2
;
4548 if (expr2
->expr_type
== EXPR_FUNCTION
4549 && expr2
->value
.function
.isym
4550 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4551 expr2
= expr2
->value
.function
.actual
->expr
;
4555 gfc_conv_expr (&lse
, code
->expr1
);
4556 gfc_add_block_to_block (&block
, &lse
.pre
);
4557 type
= TREE_TYPE (lse
.expr
);
4558 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
4560 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4561 == GFC_OMP_ATOMIC_WRITE
)
4562 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
))
4564 gfc_conv_expr (&rse
, expr2
);
4565 gfc_add_block_to_block (&block
, &rse
.pre
);
4567 else if (expr2
->expr_type
== EXPR_OP
)
4570 switch (expr2
->value
.op
.op
)
4572 case INTRINSIC_PLUS
:
4575 case INTRINSIC_TIMES
:
4578 case INTRINSIC_MINUS
:
4581 case INTRINSIC_DIVIDE
:
4582 if (expr2
->ts
.type
== BT_INTEGER
)
4583 op
= TRUNC_DIV_EXPR
;
4588 op
= TRUTH_ANDIF_EXPR
;
4591 op
= TRUTH_ORIF_EXPR
;
4596 case INTRINSIC_NEQV
:
4602 e
= expr2
->value
.op
.op1
;
4603 if (e
->expr_type
== EXPR_FUNCTION
4604 && e
->value
.function
.isym
4605 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4606 e
= e
->value
.function
.actual
->expr
;
4607 if (e
->expr_type
== EXPR_VARIABLE
4608 && e
->symtree
!= NULL
4609 && e
->symtree
->n
.sym
== var
)
4611 expr2
= expr2
->value
.op
.op2
;
4616 e
= expr2
->value
.op
.op2
;
4617 if (e
->expr_type
== EXPR_FUNCTION
4618 && e
->value
.function
.isym
4619 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4620 e
= e
->value
.function
.actual
->expr
;
4621 gcc_assert (e
->expr_type
== EXPR_VARIABLE
4622 && e
->symtree
!= NULL
4623 && e
->symtree
->n
.sym
== var
);
4624 expr2
= expr2
->value
.op
.op1
;
4625 var_on_left
= false;
4627 gfc_conv_expr (&rse
, expr2
);
4628 gfc_add_block_to_block (&block
, &rse
.pre
);
4632 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
4633 switch (expr2
->value
.function
.isym
->id
)
4653 e
= expr2
->value
.function
.actual
->expr
;
4654 gcc_assert (e
->expr_type
== EXPR_VARIABLE
4655 && e
->symtree
!= NULL
4656 && e
->symtree
->n
.sym
== var
);
4658 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
4659 gfc_add_block_to_block (&block
, &rse
.pre
);
4660 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
4662 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
4663 gfc_actual_arglist
*arg
;
4665 gfc_add_modify (&block
, accum
, rse
.expr
);
4666 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
4669 gfc_init_block (&rse
.pre
);
4670 gfc_conv_expr (&rse
, arg
->expr
);
4671 gfc_add_block_to_block (&block
, &rse
.pre
);
4672 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
4674 gfc_add_modify (&block
, accum
, x
);
4680 expr2
= expr2
->value
.function
.actual
->next
->expr
;
4683 lhsaddr
= save_expr (lhsaddr
);
4684 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
4685 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
4686 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
4688 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
4689 it even after unsharing function body. */
4690 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
4691 DECL_CONTEXT (var
) = current_function_decl
;
4692 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
4693 NULL_TREE
, NULL_TREE
);
4696 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
4698 if (((atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_MASK
)
4699 == GFC_OMP_ATOMIC_WRITE
)
4700 || (atomic_code
->ext
.omp_clauses
->atomic_op
& GFC_OMP_ATOMIC_SWAP
))
4704 x
= convert (TREE_TYPE (rhs
),
4705 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
4707 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
4709 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
4712 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
4713 && TREE_CODE (type
) != COMPLEX_TYPE
)
4714 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
4715 TREE_TYPE (TREE_TYPE (rhs
)), x
);
4717 gfc_add_block_to_block (&block
, &lse
.pre
);
4718 gfc_add_block_to_block (&block
, &rse
.pre
);
4720 if (aop
== OMP_ATOMIC
)
4722 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
4723 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
4724 gfc_add_expr_to_block (&block
, x
);
4728 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
4731 expr2
= code
->expr2
;
4732 if (expr2
->expr_type
== EXPR_FUNCTION
4733 && expr2
->value
.function
.isym
4734 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
4735 expr2
= expr2
->value
.function
.actual
->expr
;
4737 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
4738 gfc_conv_expr (&vse
, code
->expr1
);
4739 gfc_add_block_to_block (&block
, &vse
.pre
);
4741 gfc_init_se (&lse
, NULL
);
4742 gfc_conv_expr (&lse
, expr2
);
4743 gfc_add_block_to_block (&block
, &lse
.pre
);
4745 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
4746 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
4747 x
= convert (TREE_TYPE (vse
.expr
), x
);
4748 gfc_add_modify (&block
, vse
.expr
, x
);
4751 return gfc_finish_block (&block
);
4755 gfc_trans_omp_barrier (void)
4757 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
4758 return build_call_expr_loc (input_location
, decl
, 0);
4762 gfc_trans_omp_cancel (gfc_code
*code
)
4765 tree ifc
= boolean_true_node
;
4767 switch (code
->ext
.omp_clauses
->cancel
)
4769 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
4770 case OMP_CANCEL_DO
: mask
= 2; break;
4771 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
4772 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
4773 default: gcc_unreachable ();
4775 gfc_start_block (&block
);
4776 if (code
->ext
.omp_clauses
->if_expr
4777 || code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
])
4782 gcc_assert ((code
->ext
.omp_clauses
->if_expr
== NULL
)
4783 ^ (code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
] == NULL
));
4784 gfc_init_se (&se
, NULL
);
4785 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
!= NULL
4786 ? code
->ext
.omp_clauses
->if_expr
4787 : code
->ext
.omp_clauses
->if_exprs
[OMP_IF_CANCEL
]);
4788 gfc_add_block_to_block (&block
, &se
.pre
);
4789 if_var
= gfc_evaluate_now (se
.expr
, &block
);
4790 gfc_add_block_to_block (&block
, &se
.post
);
4791 tree type
= TREE_TYPE (if_var
);
4792 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
4793 boolean_type_node
, if_var
,
4794 build_zero_cst (type
));
4796 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
4797 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
4798 ifc
= fold_convert (c_bool_type
, ifc
);
4799 gfc_add_expr_to_block (&block
,
4800 build_call_expr_loc (input_location
, decl
, 2,
4801 build_int_cst (integer_type_node
,
4803 return gfc_finish_block (&block
);
4807 gfc_trans_omp_cancellation_point (gfc_code
*code
)
4810 switch (code
->ext
.omp_clauses
->cancel
)
4812 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
4813 case OMP_CANCEL_DO
: mask
= 2; break;
4814 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
4815 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
4816 default: gcc_unreachable ();
4818 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
4819 return build_call_expr_loc (input_location
, decl
, 1,
4820 build_int_cst (integer_type_node
, mask
));
4824 gfc_trans_omp_critical (gfc_code
*code
)
4827 tree stmt
, name
= NULL_TREE
;
4828 if (code
->ext
.omp_clauses
->critical_name
!= NULL
)
4829 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
4830 gfc_start_block (&block
);
4831 stmt
= make_node (OMP_CRITICAL
);
4832 TREE_TYPE (stmt
) = void_type_node
;
4833 OMP_CRITICAL_BODY (stmt
) = gfc_trans_code (code
->block
->next
);
4834 OMP_CRITICAL_NAME (stmt
) = name
;
4835 OMP_CRITICAL_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
4836 code
->ext
.omp_clauses
,
4838 gfc_add_expr_to_block (&block
, stmt
);
4839 return gfc_finish_block (&block
);
4842 typedef struct dovar_init_d
{
4849 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
4850 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
4853 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
4854 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
4857 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
4858 int i
, collapse
= clauses
->collapse
;
4859 vec
<dovar_init
> inits
= vNULL
;
4862 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
4863 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
4865 /* Both collapsed and tiled loops are lowered the same way. In
4866 OpenACC, those clauses are not compatible, so prioritize the tile
4867 clause, if present. */
4871 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
4875 doacross_steps
= NULL
;
4876 if (clauses
->orderedc
)
4877 collapse
= clauses
->orderedc
;
4881 code
= code
->block
->next
;
4882 gcc_assert (code
->op
== EXEC_DO
);
4884 init
= make_tree_vec (collapse
);
4885 cond
= make_tree_vec (collapse
);
4886 incr
= make_tree_vec (collapse
);
4887 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
4891 gfc_start_block (&block
);
4895 /* simd schedule modifier is only useful for composite do simd and other
4896 constructs including that, where gfc_trans_omp_do is only called
4897 on the simd construct and DO's clauses are translated elsewhere. */
4898 do_clauses
->sched_simd
= false;
4900 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
4902 for (i
= 0; i
< collapse
; i
++)
4905 int dovar_found
= 0;
4910 gfc_omp_namelist
*n
= NULL
;
4911 if (op
== EXEC_OMP_SIMD
&& collapse
== 1)
4912 for (n
= clauses
->lists
[OMP_LIST_LINEAR
];
4913 n
!= NULL
; n
= n
->next
)
4914 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4919 if (n
== NULL
&& op
!= EXEC_OMP_DISTRIBUTE
)
4920 for (n
= clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4921 n
!= NULL
; n
= n
->next
)
4922 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4928 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
4929 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4936 /* Evaluate all the expressions in the iterator. */
4937 gfc_init_se (&se
, NULL
);
4938 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
4939 gfc_add_block_to_block (pblock
, &se
.pre
);
4941 type
= TREE_TYPE (dovar
);
4942 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
4944 gfc_init_se (&se
, NULL
);
4945 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
4946 gfc_add_block_to_block (pblock
, &se
.pre
);
4947 from
= gfc_evaluate_now (se
.expr
, pblock
);
4949 gfc_init_se (&se
, NULL
);
4950 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
4951 gfc_add_block_to_block (pblock
, &se
.pre
);
4952 to
= gfc_evaluate_now (se
.expr
, pblock
);
4954 gfc_init_se (&se
, NULL
);
4955 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
4956 gfc_add_block_to_block (pblock
, &se
.pre
);
4957 step
= gfc_evaluate_now (se
.expr
, pblock
);
4960 /* Special case simple loops. */
4963 if (integer_onep (step
))
4965 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
4970 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
4976 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
4977 /* The condition should not be folded. */
4978 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
4979 ? LE_EXPR
: GE_EXPR
,
4980 logical_type_node
, dovar
, to
);
4981 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
4983 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
4986 TREE_VEC_ELT (incr
, i
));
4990 /* STEP is not 1 or -1. Use:
4991 for (count = 0; count < (to + step - from) / step; count++)
4993 dovar = from + count * step;
4997 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
4998 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
4999 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
5001 tmp
= gfc_evaluate_now (tmp
, pblock
);
5002 count
= gfc_create_var (type
, "count");
5003 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
5004 build_int_cst (type
, 0));
5005 /* The condition should not be folded. */
5006 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
5009 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
5011 build_int_cst (type
, 1));
5012 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
5013 MODIFY_EXPR
, type
, count
,
5014 TREE_VEC_ELT (incr
, i
));
5016 /* Initialize DOVAR. */
5017 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
5018 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
5019 dovar_init e
= {dovar
, tmp
};
5020 inits
.safe_push (e
);
5021 if (clauses
->orderedc
)
5023 if (doacross_steps
== NULL
)
5024 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
, true);
5025 (*doacross_steps
)[i
] = step
;
5029 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
5031 if (dovar_found
== 3
5032 && op
== EXEC_OMP_SIMD
5036 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
5037 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
5038 && OMP_CLAUSE_DECL (tmp
) == dovar
)
5040 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5044 if (!dovar_found
&& op
== EXEC_OMP_SIMD
)
5048 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
5049 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
5050 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5051 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5052 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5057 else if (!dovar_found
&& !simple
)
5059 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
5060 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
5061 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5063 if (dovar_found
> 1)
5070 /* If dovar is lastprivate, but different counter is used,
5071 dovar += step needs to be added to
5072 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
5073 will have the value on entry of the last loop, rather
5074 than value after iterator increment. */
5075 if (clauses
->orderedc
)
5077 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
5080 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
5081 type
, count
, build_one_cst (type
));
5082 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
5084 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5089 tmp
= gfc_evaluate_now (step
, pblock
);
5090 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
5093 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
5095 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5096 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5097 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5099 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
5102 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5103 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5105 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
5109 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
5111 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5112 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5113 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
5115 tree l
= build_omp_clause (input_location
,
5116 OMP_CLAUSE_LASTPRIVATE
);
5117 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5118 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (l
) = 1;
5119 OMP_CLAUSE_DECL (l
) = dovar_decl
;
5120 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
5121 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
5123 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
5127 gcc_assert (simple
|| c
!= NULL
);
5131 if (op
!= EXEC_OMP_SIMD
|| dovar_found
== 1)
5132 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
5133 else if (collapse
== 1)
5135 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
5136 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
5137 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
5138 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
5141 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
5142 OMP_CLAUSE_DECL (tmp
) = count
;
5143 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
5146 if (i
+ 1 < collapse
)
5147 code
= code
->block
->next
;
5150 if (pblock
!= &block
)
5153 gfc_start_block (&block
);
5156 gfc_start_block (&body
);
5158 FOR_EACH_VEC_ELT (inits
, ix
, di
)
5159 gfc_add_modify (&body
, di
->var
, di
->init
);
5162 /* Cycle statement is implemented with a goto. Exit statement must not be
5163 present for this loop. */
5164 cycle_label
= gfc_build_label_decl (NULL_TREE
);
5166 /* Put these labels where they can be found later. */
5168 code
->cycle_label
= cycle_label
;
5169 code
->exit_label
= NULL_TREE
;
5171 /* Main loop body. */
5172 if (clauses
->lists
[OMP_LIST_REDUCTION_INSCAN
])
5174 gcc_assert (code
->block
->next
->next
->op
== EXEC_OMP_SCAN
);
5175 gcc_assert (code
->block
->next
->next
->next
->next
== NULL
);
5176 locus
*cloc
= &code
->block
->next
->next
->loc
;
5177 location_t loc
= gfc_get_location (cloc
);
5179 gfc_code code2
= *code
->block
->next
;
5181 tmp
= gfc_trans_code (&code2
);
5182 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, NULL_TREE
);
5183 SET_EXPR_LOCATION (tmp
, loc
);
5184 gfc_add_expr_to_block (&body
, tmp
);
5185 input_location
= loc
;
5186 tree c
= gfc_trans_omp_clauses (&body
,
5187 code
->block
->next
->next
->ext
.omp_clauses
,
5189 code2
= *code
->block
->next
->next
->next
;
5191 tmp
= gfc_trans_code (&code2
);
5192 tmp
= build2 (OMP_SCAN
, void_type_node
, tmp
, c
);
5193 SET_EXPR_LOCATION (tmp
, loc
);
5196 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
5197 gfc_add_expr_to_block (&body
, tmp
);
5199 /* Label for cycle statements (if needed). */
5200 if (TREE_USED (cycle_label
))
5202 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
5203 gfc_add_expr_to_block (&body
, tmp
);
5206 /* End of loop body. */
5209 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
5210 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
5211 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
5212 case EXEC_OMP_LOOP
: stmt
= make_node (OMP_LOOP
); break;
5213 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
5214 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
5215 default: gcc_unreachable ();
5218 TREE_TYPE (stmt
) = void_type_node
;
5219 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
5220 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
5221 OMP_FOR_INIT (stmt
) = init
;
5222 OMP_FOR_COND (stmt
) = cond
;
5223 OMP_FOR_INCR (stmt
) = incr
;
5225 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
5226 gfc_add_expr_to_block (&block
, stmt
);
5228 vec_free (doacross_steps
);
5229 doacross_steps
= saved_doacross_steps
;
5231 return gfc_finish_block (&block
);
5234 /* Translate combined OpenACC 'parallel loop', 'kernels loop', 'serial loop'
5238 gfc_trans_oacc_combined_directive (gfc_code
*code
)
5240 stmtblock_t block
, *pblock
= NULL
;
5241 gfc_omp_clauses construct_clauses
, loop_clauses
;
5242 tree stmt
, oacc_clauses
= NULL_TREE
;
5243 enum tree_code construct_code
;
5244 location_t loc
= input_location
;
5248 case EXEC_OACC_PARALLEL_LOOP
:
5249 construct_code
= OACC_PARALLEL
;
5251 case EXEC_OACC_KERNELS_LOOP
:
5252 construct_code
= OACC_KERNELS
;
5254 case EXEC_OACC_SERIAL_LOOP
:
5255 construct_code
= OACC_SERIAL
;
5261 gfc_start_block (&block
);
5263 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
5264 if (code
->ext
.omp_clauses
!= NULL
)
5266 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
5267 sizeof (construct_clauses
));
5268 loop_clauses
.collapse
= construct_clauses
.collapse
;
5269 loop_clauses
.gang
= construct_clauses
.gang
;
5270 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
5271 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
5272 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
5273 loop_clauses
.vector
= construct_clauses
.vector
;
5274 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
5275 loop_clauses
.worker
= construct_clauses
.worker
;
5276 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
5277 loop_clauses
.seq
= construct_clauses
.seq
;
5278 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
5279 loop_clauses
.independent
= construct_clauses
.independent
;
5280 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
5281 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
5282 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
5283 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
5284 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
5285 construct_clauses
.gang
= false;
5286 construct_clauses
.gang_static
= false;
5287 construct_clauses
.gang_num_expr
= NULL
;
5288 construct_clauses
.gang_static_expr
= NULL
;
5289 construct_clauses
.vector
= false;
5290 construct_clauses
.vector_expr
= NULL
;
5291 construct_clauses
.worker
= false;
5292 construct_clauses
.worker_expr
= NULL
;
5293 construct_clauses
.seq
= false;
5294 construct_clauses
.par_auto
= false;
5295 construct_clauses
.independent
= false;
5296 construct_clauses
.independent
= false;
5297 construct_clauses
.tile_list
= NULL
;
5298 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
5299 if (construct_code
== OACC_KERNELS
)
5300 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
5301 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
5302 code
->loc
, false, true);
5304 if (!loop_clauses
.seq
)
5308 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
5309 protected_set_expr_location (stmt
, loc
);
5310 if (TREE_CODE (stmt
) != BIND_EXPR
)
5311 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5314 stmt
= build2_loc (loc
, construct_code
, void_type_node
, stmt
, oacc_clauses
);
5315 gfc_add_expr_to_block (&block
, stmt
);
5316 return gfc_finish_block (&block
);
5320 gfc_trans_omp_depobj (gfc_code
*code
)
5324 gfc_init_se (&se
, NULL
);
5325 gfc_init_block (&block
);
5326 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->depobj
);
5327 gcc_assert (se
.pre
.head
== NULL
&& se
.post
.head
== NULL
);
5328 tree depobj
= se
.expr
;
5329 location_t loc
= EXPR_LOCATION (depobj
);
5330 if (!POINTER_TYPE_P (TREE_TYPE (depobj
)))
5331 depobj
= gfc_build_addr_expr (NULL
, depobj
);
5332 depobj
= fold_convert (build_pointer_type_for_mode (ptr_type_node
,
5333 TYPE_MODE (ptr_type_node
),
5335 gfc_omp_namelist
*n
= code
->ext
.omp_clauses
->lists
[OMP_LIST_DEPEND
];
5340 var
= gfc_convert_expr_to_tree (&block
, n
->expr
);
5342 var
= gfc_get_symbol_decl (n
->sym
);
5343 if (!POINTER_TYPE_P (TREE_TYPE (var
)))
5344 var
= gfc_build_addr_expr (NULL
, var
);
5345 depobj
= save_expr (depobj
);
5346 tree r
= build_fold_indirect_ref_loc (loc
, depobj
);
5347 gfc_add_expr_to_block (&block
,
5348 build2 (MODIFY_EXPR
, void_type_node
, r
, var
));
5351 /* Only one may be set. */
5352 gcc_assert (((int)(n
!= NULL
) + (int)(code
->ext
.omp_clauses
->destroy
)
5353 + (int)(code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
))
5355 int k
= -1; /* omp_clauses->destroy */
5356 if (!code
->ext
.omp_clauses
->destroy
)
5357 switch (code
->ext
.omp_clauses
->depobj_update
!= OMP_DEPEND_UNSET
5358 ? code
->ext
.omp_clauses
->depobj_update
: n
->u
.depend_op
)
5360 case OMP_DEPEND_IN
: k
= GOMP_DEPEND_IN
; break;
5361 case OMP_DEPEND_OUT
: k
= GOMP_DEPEND_OUT
; break;
5362 case OMP_DEPEND_INOUT
: k
= GOMP_DEPEND_INOUT
; break;
5363 case OMP_DEPEND_MUTEXINOUTSET
: k
= GOMP_DEPEND_MUTEXINOUTSET
; break;
5364 default: gcc_unreachable ();
5366 tree t
= build_int_cst (ptr_type_node
, k
);
5367 depobj
= build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (depobj
), depobj
,
5368 TYPE_SIZE_UNIT (ptr_type_node
));
5369 depobj
= build_fold_indirect_ref_loc (loc
, depobj
);
5370 gfc_add_expr_to_block (&block
, build2 (MODIFY_EXPR
, void_type_node
, depobj
, t
));
5372 return gfc_finish_block (&block
);
5376 gfc_trans_omp_error (gfc_code
*code
)
5381 bool fatal
= code
->ext
.omp_clauses
->severity
== OMP_SEVERITY_FATAL
;
5382 tree fndecl
= builtin_decl_explicit (fatal
? BUILT_IN_GOMP_ERROR
5383 : BUILT_IN_GOMP_WARNING
);
5384 gfc_start_block (&block
);
5385 gfc_init_se (&se
, NULL
);
5386 if (!code
->ext
.omp_clauses
->message
)
5388 message
= null_pointer_node
;
5389 len
= build_int_cst (size_type_node
, 0);
5393 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->message
);
5395 if (!POINTER_TYPE_P (TREE_TYPE (message
)))
5396 /* To ensure an ARRAY_TYPE is not passed as such. */
5397 message
= gfc_build_addr_expr (NULL
, message
);
5398 len
= se
.string_length
;
5400 gfc_add_block_to_block (&block
, &se
.pre
);
5401 gfc_add_expr_to_block (&block
, build_call_expr_loc (input_location
, fndecl
,
5403 gfc_add_block_to_block (&block
, &se
.post
);
5404 return gfc_finish_block (&block
);
5408 gfc_trans_omp_flush (gfc_code
*code
)
5411 if (!code
->ext
.omp_clauses
5412 || code
->ext
.omp_clauses
->memorder
== OMP_MEMORDER_UNSET
)
5414 call
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
5415 call
= build_call_expr_loc (input_location
, call
, 0);
5419 enum memmodel mo
= MEMMODEL_LAST
;
5420 switch (code
->ext
.omp_clauses
->memorder
)
5422 case OMP_MEMORDER_ACQ_REL
: mo
= MEMMODEL_ACQ_REL
; break;
5423 case OMP_MEMORDER_RELEASE
: mo
= MEMMODEL_RELEASE
; break;
5424 case OMP_MEMORDER_ACQUIRE
: mo
= MEMMODEL_ACQUIRE
; break;
5425 default: gcc_unreachable (); break;
5427 call
= builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE
);
5428 call
= build_call_expr_loc (input_location
, call
, 1,
5429 build_int_cst (integer_type_node
, mo
));
5435 gfc_trans_omp_master (gfc_code
*code
)
5437 tree stmt
= gfc_trans_code (code
->block
->next
);
5438 if (IS_EMPTY_STMT (stmt
))
5440 return build1_v (OMP_MASTER
, stmt
);
5444 gfc_trans_omp_masked (gfc_code
*code
, gfc_omp_clauses
*clauses
)
5447 tree body
= gfc_trans_code (code
->block
->next
);
5448 if (IS_EMPTY_STMT (body
))
5451 clauses
= code
->ext
.omp_clauses
;
5452 gfc_start_block (&block
);
5453 tree omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
5454 tree stmt
= make_node (OMP_MASKED
);
5455 TREE_TYPE (stmt
) = void_type_node
;
5456 OMP_MASKED_BODY (stmt
) = body
;
5457 OMP_MASKED_CLAUSES (stmt
) = omp_clauses
;
5458 gfc_add_expr_to_block (&block
, stmt
);
5459 return gfc_finish_block (&block
);
5464 gfc_trans_omp_ordered (gfc_code
*code
)
5468 if (!code
->ext
.omp_clauses
->simd
)
5469 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
5470 code
->ext
.omp_clauses
->threads
= 0;
5472 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
5474 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
5475 code
->block
? gfc_trans_code (code
->block
->next
)
5476 : NULL_TREE
, omp_clauses
);
5480 gfc_trans_omp_parallel (gfc_code
*code
)
5483 tree stmt
, omp_clauses
;
5485 gfc_start_block (&block
);
5486 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5489 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5490 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5491 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
5493 gfc_add_expr_to_block (&block
, stmt
);
5494 return gfc_finish_block (&block
);
/* Indices of the constituent constructs a combined OpenMP directive is
   split into, and the corresponding bit masks used when distributing
   clauses among them.  */

enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_TASKLOOP,
  GFC_OMP_SPLIT_MASKED,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET),
  GFC_OMP_MASK_TASKLOOP = (1 << GFC_OMP_SPLIT_TASKLOOP),
  GFC_OMP_MASK_MASKED = (1 << GFC_OMP_SPLIT_MASKED)
};
5522 /* If a var is in lastprivate/firstprivate/reduction but not in a
5523 data mapping/sharing clause, add it to 'map(tofrom:' if is_target
5524 and to 'shared' otherwise. */
5526 gfc_add_clause_implicitly (gfc_omp_clauses
*clauses_out
,
5527 gfc_omp_clauses
*clauses_in
,
5528 bool is_target
, bool is_parallel_do
)
5530 int clauselist_to_add
= is_target
? OMP_LIST_MAP
: OMP_LIST_SHARED
;
5531 gfc_omp_namelist
*tail
= NULL
;
5532 for (int i
= 0; i
< 5; ++i
)
5534 gfc_omp_namelist
*n
;
5537 case 0: n
= clauses_in
->lists
[OMP_LIST_FIRSTPRIVATE
]; break;
5538 case 1: n
= clauses_in
->lists
[OMP_LIST_LASTPRIVATE
]; break;
5539 case 2: n
= clauses_in
->lists
[OMP_LIST_REDUCTION
]; break;
5540 case 3: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_INSCAN
]; break;
5541 case 4: n
= clauses_in
->lists
[OMP_LIST_REDUCTION_TASK
]; break;
5542 default: gcc_unreachable ();
5544 for (; n
!= NULL
; n
= n
->next
)
5546 gfc_omp_namelist
*n2
, **n_firstp
= NULL
, **n_lastp
= NULL
;
5547 for (int j
= 0; j
< 6; ++j
)
5549 gfc_omp_namelist
**n2ref
= NULL
, *prev2
= NULL
;
5553 n2ref
= &clauses_out
->lists
[clauselist_to_add
];
5556 n2ref
= &clauses_out
->lists
[OMP_LIST_FIRSTPRIVATE
];
5560 n2ref
= &clauses_in
->lists
[OMP_LIST_LASTPRIVATE
];
5562 n2ref
= &clauses_out
->lists
[OMP_LIST_LASTPRIVATE
];
5564 case 3: n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION
]; break;
5566 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_INSCAN
];
5569 n2ref
= &clauses_out
->lists
[OMP_LIST_REDUCTION_TASK
];
5571 default: gcc_unreachable ();
5573 for (n2
= *n2ref
; n2
!= NULL
; prev2
= n2
, n2
= n2
->next
)
5574 if (n2
->sym
== n
->sym
)
5578 if (j
== 0 /* clauselist_to_add */)
5579 break; /* Already present. */
5580 if (j
== 1 /* OMP_LIST_FIRSTPRIVATE */)
5582 n_firstp
= prev2
? &prev2
->next
: n2ref
;
5585 if (j
== 2 /* OMP_LIST_LASTPRIVATE */)
5587 n_lastp
= prev2
? &prev2
->next
: n2ref
;
5593 if (n_firstp
&& n_lastp
)
5595 /* For parallel do, GCC puts firstprivatee/lastprivate
5599 *n_firstp
= (*n_firstp
)->next
;
5601 *n_lastp
= (*n_lastp
)->next
;
5603 else if (is_target
&& n_lastp
)
5605 else if (n2
|| n_firstp
|| n_lastp
)
5607 if (clauses_out
->lists
[clauselist_to_add
]
5608 && (clauses_out
->lists
[clauselist_to_add
]
5609 == clauses_in
->lists
[clauselist_to_add
]))
5611 gfc_omp_namelist
*p
= NULL
;
5612 for (n2
= clauses_in
->lists
[clauselist_to_add
]; n2
; n2
= n2
->next
)
5616 p
->next
= gfc_get_omp_namelist ();
5621 p
= gfc_get_omp_namelist ();
5622 clauses_out
->lists
[clauselist_to_add
] = p
;
5629 tail
= clauses_out
->lists
[clauselist_to_add
];
5630 for (; tail
&& tail
->next
; tail
= tail
->next
)
5633 n2
= gfc_get_omp_namelist ();
5634 n2
->where
= n
->where
;
5637 n2
->u
.map_op
= OMP_MAP_TOFROM
;
5644 clauses_out
->lists
[clauselist_to_add
] = n2
;
5650 gfc_free_split_omp_clauses (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
5652 for (int i
= 0; i
< GFC_OMP_SPLIT_NUM
; ++i
)
5653 for (int j
= 0; j
< OMP_LIST_NUM
; ++j
)
5654 if (clausesa
[i
].lists
[j
] && clausesa
[i
].lists
[j
] != code
->ext
.omp_clauses
->lists
[j
])
5655 for (gfc_omp_namelist
*n
= clausesa
[i
].lists
[j
]; n
;)
5657 gfc_omp_namelist
*p
= n
;
5664 gfc_split_omp_clauses (gfc_code
*code
,
5665 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
5667 int mask
= 0, innermost
= 0;
5668 bool is_loop
= false;
5669 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
5672 case EXEC_OMP_DISTRIBUTE
:
5673 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
5675 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5676 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5677 innermost
= GFC_OMP_SPLIT_DO
;
5679 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5680 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
5681 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5682 innermost
= GFC_OMP_SPLIT_SIMD
;
5684 case EXEC_OMP_DISTRIBUTE_SIMD
:
5685 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
5686 innermost
= GFC_OMP_SPLIT_SIMD
;
5690 innermost
= GFC_OMP_SPLIT_DO
;
5692 case EXEC_OMP_DO_SIMD
:
5693 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5694 innermost
= GFC_OMP_SPLIT_SIMD
;
5696 case EXEC_OMP_PARALLEL
:
5697 innermost
= GFC_OMP_SPLIT_PARALLEL
;
5699 case EXEC_OMP_PARALLEL_DO
:
5700 case EXEC_OMP_PARALLEL_LOOP
:
5701 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5702 innermost
= GFC_OMP_SPLIT_DO
;
5704 case EXEC_OMP_PARALLEL_DO_SIMD
:
5705 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5706 innermost
= GFC_OMP_SPLIT_SIMD
;
5708 case EXEC_OMP_PARALLEL_MASKED
:
5709 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
;
5710 innermost
= GFC_OMP_SPLIT_MASKED
;
5712 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
5713 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
5714 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
5715 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5717 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
5718 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5719 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5721 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
5722 mask
= (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_MASKED
5723 | GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
);
5724 innermost
= GFC_OMP_SPLIT_SIMD
;
5726 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
5727 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5728 innermost
= GFC_OMP_SPLIT_SIMD
;
5731 innermost
= GFC_OMP_SPLIT_SIMD
;
5733 case EXEC_OMP_TARGET
:
5734 innermost
= GFC_OMP_SPLIT_TARGET
;
5736 case EXEC_OMP_TARGET_PARALLEL
:
5737 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
5738 innermost
= GFC_OMP_SPLIT_PARALLEL
;
5740 case EXEC_OMP_TARGET_PARALLEL_DO
:
5741 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
5742 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5743 innermost
= GFC_OMP_SPLIT_DO
;
5745 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5746 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
5747 | GFC_OMP_MASK_SIMD
;
5748 innermost
= GFC_OMP_SPLIT_SIMD
;
5750 case EXEC_OMP_TARGET_SIMD
:
5751 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
5752 innermost
= GFC_OMP_SPLIT_SIMD
;
5754 case EXEC_OMP_TARGET_TEAMS
:
5755 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
5756 innermost
= GFC_OMP_SPLIT_TEAMS
;
5758 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5759 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
5760 | GFC_OMP_MASK_DISTRIBUTE
;
5761 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
5763 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5764 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5765 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5766 innermost
= GFC_OMP_SPLIT_DO
;
5768 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5769 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5770 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5771 innermost
= GFC_OMP_SPLIT_SIMD
;
5773 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5774 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
5775 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
5776 innermost
= GFC_OMP_SPLIT_SIMD
;
5778 case EXEC_OMP_TARGET_TEAMS_LOOP
:
5779 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
5780 innermost
= GFC_OMP_SPLIT_DO
;
5782 case EXEC_OMP_MASKED_TASKLOOP
:
5783 mask
= GFC_OMP_SPLIT_MASKED
| GFC_OMP_SPLIT_TASKLOOP
;
5784 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5786 case EXEC_OMP_MASTER_TASKLOOP
:
5787 case EXEC_OMP_TASKLOOP
:
5788 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
5790 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
5791 mask
= GFC_OMP_MASK_MASKED
| GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5792 innermost
= GFC_OMP_SPLIT_SIMD
;
5794 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
5795 case EXEC_OMP_TASKLOOP_SIMD
:
5796 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
5797 innermost
= GFC_OMP_SPLIT_SIMD
;
5799 case EXEC_OMP_TEAMS
:
5800 innermost
= GFC_OMP_SPLIT_TEAMS
;
5802 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5803 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
5804 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
5806 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5807 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5808 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
5809 innermost
= GFC_OMP_SPLIT_DO
;
5811 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5812 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
5813 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
5814 innermost
= GFC_OMP_SPLIT_SIMD
;
5816 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5817 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
5818 innermost
= GFC_OMP_SPLIT_SIMD
;
5820 case EXEC_OMP_TEAMS_LOOP
:
5821 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DO
;
5822 innermost
= GFC_OMP_SPLIT_DO
;
5829 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
5832 /* Loops are similar to DO but still a bit different. */
5836 case EXEC_OMP_PARALLEL_LOOP
:
5837 case EXEC_OMP_TEAMS_LOOP
:
5838 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
5839 case EXEC_OMP_TARGET_TEAMS_LOOP
:
5844 if (code
->ext
.omp_clauses
!= NULL
)
5846 if (mask
& GFC_OMP_MASK_TARGET
)
5848 /* First the clauses that are unique to some constructs. */
5849 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
5850 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
5851 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
5852 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
5853 clausesa
[GFC_OMP_SPLIT_TARGET
].device
5854 = code
->ext
.omp_clauses
->device
;
5855 for (int i
= 0; i
< OMP_DEFAULTMAP_CAT_NUM
; i
++)
5856 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
[i
]
5857 = code
->ext
.omp_clauses
->defaultmap
[i
];
5858 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
5859 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
5860 /* And this is copied to all. */
5861 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
5862 = code
->ext
.omp_clauses
->if_expr
;
5864 if (mask
& GFC_OMP_MASK_TEAMS
)
5866 /* First the clauses that are unique to some constructs. */
5867 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
5868 = code
->ext
.omp_clauses
->num_teams
;
5869 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
5870 = code
->ext
.omp_clauses
->thread_limit
;
5871 /* Shared and default clauses are allowed on parallel, teams
5873 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
5874 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
5875 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
5876 = code
->ext
.omp_clauses
->default_sharing
;
5878 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
5880 /* First the clauses that are unique to some constructs. */
5881 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
5882 = code
->ext
.omp_clauses
->dist_sched_kind
;
5883 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
5884 = code
->ext
.omp_clauses
->dist_chunk_size
;
5885 /* Duplicate collapse. */
5886 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
5887 = code
->ext
.omp_clauses
->collapse
;
5888 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].order_concurrent
5889 = code
->ext
.omp_clauses
->order_concurrent
;
5891 if (mask
& GFC_OMP_MASK_PARALLEL
)
5893 /* First the clauses that are unique to some constructs. */
5894 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
5895 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
5896 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
5897 = code
->ext
.omp_clauses
->num_threads
;
5898 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
5899 = code
->ext
.omp_clauses
->proc_bind
;
5900 /* Shared and default clauses are allowed on parallel, teams
5902 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
5903 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
5904 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
5905 = code
->ext
.omp_clauses
->default_sharing
;
5906 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
5907 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
5908 /* And this is copied to all. */
5909 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
5910 = code
->ext
.omp_clauses
->if_expr
;
5912 if (mask
& GFC_OMP_MASK_MASKED
)
5913 clausesa
[GFC_OMP_SPLIT_MASKED
].filter
= code
->ext
.omp_clauses
->filter
;
5914 if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
5916 /* First the clauses that are unique to some constructs. */
5917 clausesa
[GFC_OMP_SPLIT_DO
].ordered
5918 = code
->ext
.omp_clauses
->ordered
;
5919 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
5920 = code
->ext
.omp_clauses
->orderedc
;
5921 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
5922 = code
->ext
.omp_clauses
->sched_kind
;
5923 if (innermost
== GFC_OMP_SPLIT_SIMD
)
5924 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
5925 = code
->ext
.omp_clauses
->sched_simd
;
5926 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
5927 = code
->ext
.omp_clauses
->sched_monotonic
;
5928 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
5929 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
5930 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
5931 = code
->ext
.omp_clauses
->chunk_size
;
5932 clausesa
[GFC_OMP_SPLIT_DO
].nowait
5933 = code
->ext
.omp_clauses
->nowait
;
5935 if (mask
& GFC_OMP_MASK_DO
)
5937 clausesa
[GFC_OMP_SPLIT_DO
].bind
5938 = code
->ext
.omp_clauses
->bind
;
5939 /* Duplicate collapse. */
5940 clausesa
[GFC_OMP_SPLIT_DO
].collapse
5941 = code
->ext
.omp_clauses
->collapse
;
5942 clausesa
[GFC_OMP_SPLIT_DO
].order_concurrent
5943 = code
->ext
.omp_clauses
->order_concurrent
;
5945 if (mask
& GFC_OMP_MASK_SIMD
)
5947 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
5948 = code
->ext
.omp_clauses
->safelen_expr
;
5949 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
5950 = code
->ext
.omp_clauses
->simdlen_expr
;
5951 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
5952 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
5953 /* Duplicate collapse. */
5954 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
5955 = code
->ext
.omp_clauses
->collapse
;
5956 clausesa
[GFC_OMP_SPLIT_SIMD
].if_exprs
[OMP_IF_SIMD
]
5957 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_SIMD
];
5958 clausesa
[GFC_OMP_SPLIT_SIMD
].order_concurrent
5959 = code
->ext
.omp_clauses
->order_concurrent
;
5960 /* And this is copied to all. */
5961 clausesa
[GFC_OMP_SPLIT_SIMD
].if_expr
5962 = code
->ext
.omp_clauses
->if_expr
;
5964 if (mask
& GFC_OMP_MASK_TASKLOOP
)
5966 /* First the clauses that are unique to some constructs. */
5967 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
5968 = code
->ext
.omp_clauses
->nogroup
;
5969 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
5970 = code
->ext
.omp_clauses
->grainsize
;
5971 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize_strict
5972 = code
->ext
.omp_clauses
->grainsize_strict
;
5973 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
5974 = code
->ext
.omp_clauses
->num_tasks
;
5975 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks_strict
5976 = code
->ext
.omp_clauses
->num_tasks_strict
;
5977 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
5978 = code
->ext
.omp_clauses
->priority
;
5979 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
5980 = code
->ext
.omp_clauses
->final_expr
;
5981 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
5982 = code
->ext
.omp_clauses
->untied
;
5983 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
5984 = code
->ext
.omp_clauses
->mergeable
;
5985 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
5986 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
5987 /* And this is copied to all. */
5988 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
5989 = code
->ext
.omp_clauses
->if_expr
;
5990 /* Shared and default clauses are allowed on parallel, teams
5992 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
5993 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
5994 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
5995 = code
->ext
.omp_clauses
->default_sharing
;
5996 /* Duplicate collapse. */
5997 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
5998 = code
->ext
.omp_clauses
->collapse
;
6000 /* Private clause is supported on all constructs but master/masked,
6001 it is enough to put it on the innermost one except for master/masked. For
6002 !$ omp parallel do put it on parallel though,
6003 as that's what we did for OpenMP 3.1. */
6004 clausesa
[((innermost
== GFC_OMP_SPLIT_DO
&& !is_loop
)
6005 || code
->op
== EXEC_OMP_PARALLEL_MASTER
6006 || code
->op
== EXEC_OMP_PARALLEL_MASKED
)
6007 ? (int) GFC_OMP_SPLIT_PARALLEL
6008 : innermost
].lists
[OMP_LIST_PRIVATE
]
6009 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
6010 /* Firstprivate clause is supported on all constructs but
6011 simd and masked/master. Put it on the outermost of those and duplicate
6012 on parallel and teams. */
6013 if (mask
& GFC_OMP_MASK_TARGET
)
6014 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_FIRSTPRIVATE
]
6015 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6016 if (mask
& GFC_OMP_MASK_TEAMS
)
6017 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
6018 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6019 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6020 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
6021 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6022 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6023 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_FIRSTPRIVATE
]
6024 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6025 if ((mask
& GFC_OMP_MASK_PARALLEL
)
6026 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
6027 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
6028 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6029 else if ((mask
& GFC_OMP_MASK_DO
) && !is_loop
)
6030 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
6031 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
6032 /* Lastprivate is allowed on distribute, do, simd, taskloop and loop.
6033 In parallel do{, simd} we actually want to put it on
6034 parallel rather than do. */
6035 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
6036 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
6037 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6038 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6039 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_LASTPRIVATE
]
6040 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6041 if ((mask
& GFC_OMP_MASK_PARALLEL
) && !is_loop
6042 && !(mask
& GFC_OMP_MASK_TASKLOOP
))
6043 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
6044 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6045 else if (mask
& GFC_OMP_MASK_DO
)
6046 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
6047 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6048 if (mask
& GFC_OMP_MASK_SIMD
)
6049 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
6050 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
6051 /* Reduction is allowed on simd, do, parallel, teams, taskloop, and loop.
6052 Duplicate it on all of them, but
6053 - omit on do if parallel is present;
6054 - omit on task and parallel if loop is present;
6055 additionally, inscan applies to do/simd only. */
6056 for (int i
= OMP_LIST_REDUCTION
; i
<= OMP_LIST_REDUCTION_TASK
; i
++)
6058 if (mask
& GFC_OMP_MASK_TASKLOOP
6059 && i
!= OMP_LIST_REDUCTION_INSCAN
)
6060 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[i
]
6061 = code
->ext
.omp_clauses
->lists
[i
];
6062 if (mask
& GFC_OMP_MASK_TEAMS
6063 && i
!= OMP_LIST_REDUCTION_INSCAN
6065 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[i
]
6066 = code
->ext
.omp_clauses
->lists
[i
];
6067 if (mask
& GFC_OMP_MASK_PARALLEL
6068 && i
!= OMP_LIST_REDUCTION_INSCAN
6069 && !(mask
& GFC_OMP_MASK_TASKLOOP
)
6071 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[i
]
6072 = code
->ext
.omp_clauses
->lists
[i
];
6073 else if (mask
& GFC_OMP_MASK_DO
)
6074 clausesa
[GFC_OMP_SPLIT_DO
].lists
[i
]
6075 = code
->ext
.omp_clauses
->lists
[i
];
6076 if (mask
& GFC_OMP_MASK_SIMD
)
6077 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[i
]
6078 = code
->ext
.omp_clauses
->lists
[i
];
6080 if (mask
& GFC_OMP_MASK_TARGET
)
6081 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IN_REDUCTION
]
6082 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
6083 if (mask
& GFC_OMP_MASK_TASKLOOP
)
6084 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_IN_REDUCTION
]
6085 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IN_REDUCTION
];
6086 /* Linear clause is supported on do and simd,
6087 put it on the innermost one. */
6088 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
6089 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
6091 /* Propagate firstprivate/lastprivate/reduction vars to
6092 shared (parallel, teams) and map-tofrom (target). */
6093 if (mask
& GFC_OMP_MASK_TARGET
)
6094 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TARGET
],
6095 code
->ext
.omp_clauses
, true, false);
6096 if ((mask
& GFC_OMP_MASK_PARALLEL
) && innermost
!= GFC_OMP_MASK_PARALLEL
)
6097 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6098 code
->ext
.omp_clauses
, false,
6099 mask
& GFC_OMP_MASK_DO
);
6100 if (mask
& GFC_OMP_MASK_TEAMS
&& innermost
!= GFC_OMP_MASK_TEAMS
)
6101 gfc_add_clause_implicitly (&clausesa
[GFC_OMP_SPLIT_TEAMS
],
6102 code
->ext
.omp_clauses
, false, false);
6103 if (((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
6104 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
6106 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
6110 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
6111 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
6114 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6115 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
6116 bool free_clausesa
= false;
6119 gfc_start_block (&block
);
6121 gfc_init_block (&block
);
6123 if (clausesa
== NULL
)
6125 clausesa
= clausesa_buf
;
6126 gfc_split_omp_clauses (code
, clausesa
);
6127 free_clausesa
= true;
6131 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
6132 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
6133 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
6136 if (TREE_CODE (body
) != BIND_EXPR
)
6137 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
6141 else if (TREE_CODE (body
) != BIND_EXPR
)
6142 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
6145 stmt
= make_node (OMP_FOR
);
6146 TREE_TYPE (stmt
) = void_type_node
;
6147 OMP_FOR_BODY (stmt
) = body
;
6148 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
6152 gfc_add_expr_to_block (&block
, stmt
);
6154 gfc_free_split_omp_clauses (code
, clausesa
);
6155 return gfc_finish_block (&block
);
6159 gfc_trans_omp_parallel_do (gfc_code
*code
, bool is_loop
, stmtblock_t
*pblock
,
6160 gfc_omp_clauses
*clausesa
)
6162 stmtblock_t block
, *new_pblock
= pblock
;
6163 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6164 tree stmt
, omp_clauses
= NULL_TREE
;
6165 bool free_clausesa
= false;
6168 gfc_start_block (&block
);
6170 gfc_init_block (&block
);
6172 if (clausesa
== NULL
)
6174 clausesa
= clausesa_buf
;
6175 gfc_split_omp_clauses (code
, clausesa
);
6176 free_clausesa
= true;
6179 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6183 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
6184 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
6185 new_pblock
= &block
;
6189 stmt
= gfc_trans_omp_do (code
, is_loop
? EXEC_OMP_LOOP
: EXEC_OMP_DO
,
6190 new_pblock
, &clausesa
[GFC_OMP_SPLIT_DO
],
6194 if (TREE_CODE (stmt
) != BIND_EXPR
)
6195 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6199 else if (TREE_CODE (stmt
) != BIND_EXPR
)
6200 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
6201 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6202 void_type_node
, stmt
, omp_clauses
);
6203 OMP_PARALLEL_COMBINED (stmt
) = 1;
6204 gfc_add_expr_to_block (&block
, stmt
);
6206 gfc_free_split_omp_clauses (code
, clausesa
);
6207 return gfc_finish_block (&block
);
6211 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
6212 gfc_omp_clauses
*clausesa
)
6215 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6216 tree stmt
, omp_clauses
= NULL_TREE
;
6217 bool free_clausesa
= false;
6220 gfc_start_block (&block
);
6222 gfc_init_block (&block
);
6224 if (clausesa
== NULL
)
6226 clausesa
= clausesa_buf
;
6227 gfc_split_omp_clauses (code
, clausesa
);
6228 free_clausesa
= true;
6232 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6236 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
6239 if (TREE_CODE (stmt
) != BIND_EXPR
)
6240 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6244 else if (TREE_CODE (stmt
) != BIND_EXPR
)
6245 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
6248 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6249 void_type_node
, stmt
, omp_clauses
);
6250 OMP_PARALLEL_COMBINED (stmt
) = 1;
6252 gfc_add_expr_to_block (&block
, stmt
);
6254 gfc_free_split_omp_clauses (code
, clausesa
);
6255 return gfc_finish_block (&block
);
6259 gfc_trans_omp_parallel_sections (gfc_code
*code
)
6262 gfc_omp_clauses section_clauses
;
6263 tree stmt
, omp_clauses
;
6265 memset (§ion_clauses
, 0, sizeof (section_clauses
));
6266 section_clauses
.nowait
= true;
6268 gfc_start_block (&block
);
6269 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6272 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
6273 if (TREE_CODE (stmt
) != BIND_EXPR
)
6274 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6277 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6278 void_type_node
, stmt
, omp_clauses
);
6279 OMP_PARALLEL_COMBINED (stmt
) = 1;
6280 gfc_add_expr_to_block (&block
, stmt
);
6281 return gfc_finish_block (&block
);
6285 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
6288 gfc_omp_clauses workshare_clauses
;
6289 tree stmt
, omp_clauses
;
6291 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
6292 workshare_clauses
.nowait
= true;
6294 gfc_start_block (&block
);
6295 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6298 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
6299 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6300 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6301 void_type_node
, stmt
, omp_clauses
);
6302 OMP_PARALLEL_COMBINED (stmt
) = 1;
6303 gfc_add_expr_to_block (&block
, stmt
);
6304 return gfc_finish_block (&block
);
6308 gfc_trans_omp_scope (gfc_code
*code
)
6311 tree body
= gfc_trans_code (code
->block
->next
);
6312 if (IS_EMPTY_STMT (body
))
6314 gfc_start_block (&block
);
6315 tree omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6317 tree stmt
= make_node (OMP_SCOPE
);
6318 TREE_TYPE (stmt
) = void_type_node
;
6319 OMP_SCOPE_BODY (stmt
) = body
;
6320 OMP_SCOPE_CLAUSES (stmt
) = omp_clauses
;
6321 gfc_add_expr_to_block (&block
, stmt
);
6322 return gfc_finish_block (&block
);
6326 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6328 stmtblock_t block
, body
;
6329 tree omp_clauses
, stmt
;
6330 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
6331 location_t loc
= gfc_get_location (&code
->loc
);
6333 gfc_start_block (&block
);
6335 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
6337 gfc_init_block (&body
);
6338 for (code
= code
->block
; code
; code
= code
->block
)
6340 /* Last section is special because of lastprivate, so even if it
6341 is empty, chain it in. */
6342 stmt
= gfc_trans_omp_code (code
->next
,
6343 has_lastprivate
&& code
->block
== NULL
);
6344 if (! IS_EMPTY_STMT (stmt
))
6346 stmt
= build1_v (OMP_SECTION
, stmt
);
6347 gfc_add_expr_to_block (&body
, stmt
);
6350 stmt
= gfc_finish_block (&body
);
6352 stmt
= build2_loc (loc
, OMP_SECTIONS
, void_type_node
, stmt
, omp_clauses
);
6353 gfc_add_expr_to_block (&block
, stmt
);
6355 return gfc_finish_block (&block
);
6359 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6361 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
6362 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6363 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_SINGLE
, void_type_node
,
6369 gfc_trans_omp_task (gfc_code
*code
)
6372 tree stmt
, omp_clauses
;
6374 gfc_start_block (&block
);
6375 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6378 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6379 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6380 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TASK
, void_type_node
,
6382 gfc_add_expr_to_block (&block
, stmt
);
6383 return gfc_finish_block (&block
);
6387 gfc_trans_omp_taskgroup (gfc_code
*code
)
6389 tree body
= gfc_trans_code (code
->block
->next
);
6390 tree stmt
= make_node (OMP_TASKGROUP
);
6391 TREE_TYPE (stmt
) = void_type_node
;
6392 OMP_TASKGROUP_BODY (stmt
) = body
;
6393 OMP_TASKGROUP_CLAUSES (stmt
) = NULL_TREE
;
6398 gfc_trans_omp_taskwait (gfc_code
*code
)
6400 if (!code
->ext
.omp_clauses
)
6402 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
6403 return build_call_expr_loc (input_location
, decl
, 0);
6406 gfc_start_block (&block
);
6407 tree stmt
= make_node (OMP_TASK
);
6408 TREE_TYPE (stmt
) = void_type_node
;
6409 OMP_TASK_BODY (stmt
) = NULL_TREE
;
6410 OMP_TASK_CLAUSES (stmt
) = gfc_trans_omp_clauses (&block
,
6411 code
->ext
.omp_clauses
,
6413 gfc_add_expr_to_block (&block
, stmt
);
6414 return gfc_finish_block (&block
);
6418 gfc_trans_omp_taskyield (void)
6420 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
6421 return build_call_expr_loc (input_location
, decl
, 0);
6425 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
6428 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6429 tree stmt
, omp_clauses
= NULL_TREE
;
6430 bool free_clausesa
= false;
6432 gfc_start_block (&block
);
6433 if (clausesa
== NULL
)
6435 clausesa
= clausesa_buf
;
6436 gfc_split_omp_clauses (code
, clausesa
);
6437 free_clausesa
= true;
6441 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
6445 case EXEC_OMP_DISTRIBUTE
:
6446 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
6447 case EXEC_OMP_TEAMS_DISTRIBUTE
:
6448 /* This is handled in gfc_trans_omp_do. */
6451 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
6452 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6453 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
6454 stmt
= gfc_trans_omp_parallel_do (code
, false, &block
, clausesa
);
6455 if (TREE_CODE (stmt
) != BIND_EXPR
)
6456 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6460 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
6461 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6462 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
6463 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
6464 if (TREE_CODE (stmt
) != BIND_EXPR
)
6465 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6469 case EXEC_OMP_DISTRIBUTE_SIMD
:
6470 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
6471 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
6472 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
6473 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
6474 if (TREE_CODE (stmt
) != BIND_EXPR
)
6475 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6484 tree distribute
= make_node (OMP_DISTRIBUTE
);
6485 TREE_TYPE (distribute
) = void_type_node
;
6486 OMP_FOR_BODY (distribute
) = stmt
;
6487 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
6490 gfc_add_expr_to_block (&block
, stmt
);
6492 gfc_free_split_omp_clauses (code
, clausesa
);
6493 return gfc_finish_block (&block
);
6497 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
6501 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
6503 bool combined
= true, free_clausesa
= false;
6505 gfc_start_block (&block
);
6506 if (clausesa
== NULL
)
6508 clausesa
= clausesa_buf
;
6509 gfc_split_omp_clauses (code
, clausesa
);
6510 free_clausesa
= true;
6515 = chainon (omp_clauses
,
6516 gfc_trans_omp_clauses (&block
,
6517 &clausesa
[GFC_OMP_SPLIT_TEAMS
],
6523 case EXEC_OMP_TARGET_TEAMS
:
6524 case EXEC_OMP_TEAMS
:
6525 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6528 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
6529 case EXEC_OMP_TEAMS_DISTRIBUTE
:
6530 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
6531 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
6534 case EXEC_OMP_TARGET_TEAMS_LOOP
:
6535 case EXEC_OMP_TEAMS_LOOP
:
6536 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_LOOP
, NULL
,
6537 &clausesa
[GFC_OMP_SPLIT_DO
],
6541 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
6546 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6547 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TEAMS
,
6548 void_type_node
, stmt
, omp_clauses
);
6550 OMP_TEAMS_COMBINED (stmt
) = 1;
6552 gfc_add_expr_to_block (&block
, stmt
);
6554 gfc_free_split_omp_clauses (code
, clausesa
);
6555 return gfc_finish_block (&block
);
6559 gfc_trans_omp_target (gfc_code
*code
)
6562 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
6563 tree stmt
, omp_clauses
= NULL_TREE
;
6565 gfc_start_block (&block
);
6566 gfc_split_omp_clauses (code
, clausesa
);
6569 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
6573 case EXEC_OMP_TARGET
:
6575 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6576 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6578 case EXEC_OMP_TARGET_PARALLEL
:
6583 gfc_start_block (&iblock
);
6585 = gfc_trans_omp_clauses (&iblock
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6587 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6588 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
6590 gfc_add_expr_to_block (&iblock
, stmt
);
6591 stmt
= gfc_finish_block (&iblock
);
6592 if (TREE_CODE (stmt
) != BIND_EXPR
)
6593 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6598 case EXEC_OMP_TARGET_PARALLEL_DO
:
6599 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
6600 stmt
= gfc_trans_omp_parallel_do (code
,
6602 == EXEC_OMP_TARGET_PARALLEL_LOOP
),
6604 if (TREE_CODE (stmt
) != BIND_EXPR
)
6605 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6609 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
6610 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
6611 if (TREE_CODE (stmt
) != BIND_EXPR
)
6612 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6616 case EXEC_OMP_TARGET_SIMD
:
6617 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
6618 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
6619 if (TREE_CODE (stmt
) != BIND_EXPR
)
6620 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6626 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
6627 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
6629 gfc_omp_clauses clausesb
;
6631 /* For combined !$omp target teams, the num_teams and
6632 thread_limit clauses are evaluated before entering the
6633 target construct. */
6634 memset (&clausesb
, '\0', sizeof (clausesb
));
6635 clausesb
.num_teams
= clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
;
6636 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
6637 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
= NULL
;
6638 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
6640 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
6642 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
6647 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
6649 if (TREE_CODE (stmt
) != BIND_EXPR
)
6650 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6657 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET
,
6658 void_type_node
, stmt
, omp_clauses
);
6659 if (code
->op
!= EXEC_OMP_TARGET
)
6660 OMP_TARGET_COMBINED (stmt
) = 1;
6661 cfun
->has_omp_target
= true;
6663 gfc_add_expr_to_block (&block
, stmt
);
6664 gfc_free_split_omp_clauses (code
, clausesa
);
6665 return gfc_finish_block (&block
);
6669 gfc_trans_omp_taskloop (gfc_code
*code
, gfc_exec_op op
)
6672 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
6673 tree stmt
, omp_clauses
= NULL_TREE
;
6675 gfc_start_block (&block
);
6676 gfc_split_omp_clauses (code
, clausesa
);
6679 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
6683 case EXEC_OMP_TASKLOOP
:
6684 /* This is handled in gfc_trans_omp_do. */
6687 case EXEC_OMP_TASKLOOP_SIMD
:
6688 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
6689 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
6690 if (TREE_CODE (stmt
) != BIND_EXPR
)
6691 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6700 tree taskloop
= make_node (OMP_TASKLOOP
);
6701 TREE_TYPE (taskloop
) = void_type_node
;
6702 OMP_FOR_BODY (taskloop
) = stmt
;
6703 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
6706 gfc_add_expr_to_block (&block
, stmt
);
6707 gfc_free_split_omp_clauses (code
, clausesa
);
6708 return gfc_finish_block (&block
);
6712 gfc_trans_omp_master_masked_taskloop (gfc_code
*code
, gfc_exec_op op
)
6714 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
6718 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
6719 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
6720 gfc_split_omp_clauses (code
, clausesa
);
6723 if (op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
6724 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
)
6725 stmt
= gfc_trans_omp_taskloop (code
, EXEC_OMP_TASKLOOP_SIMD
);
6728 gcc_assert (op
== EXEC_OMP_MASKED_TASKLOOP
6729 || op
== EXEC_OMP_MASTER_TASKLOOP
);
6730 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_TASKLOOP
, NULL
,
6731 code
->op
!= EXEC_OMP_MASTER_TASKLOOP
6732 ? &clausesa
[GFC_OMP_SPLIT_TASKLOOP
]
6733 : code
->ext
.omp_clauses
, NULL
);
6735 if (TREE_CODE (stmt
) != BIND_EXPR
)
6736 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6739 gfc_start_block (&block
);
6740 if (op
== EXEC_OMP_MASKED_TASKLOOP
|| op
== EXEC_OMP_MASKED_TASKLOOP_SIMD
)
6742 tree clauses
= gfc_trans_omp_clauses (&block
,
6743 &clausesa
[GFC_OMP_SPLIT_MASKED
],
6745 tree msk
= make_node (OMP_MASKED
);
6746 TREE_TYPE (msk
) = void_type_node
;
6747 OMP_MASKED_BODY (msk
) = stmt
;
6748 OMP_MASKED_CLAUSES (msk
) = clauses
;
6749 OMP_MASKED_COMBINED (msk
) = 1;
6750 gfc_add_expr_to_block (&block
, msk
);
6754 gcc_assert (op
== EXEC_OMP_MASTER_TASKLOOP
6755 || op
== EXEC_OMP_MASTER_TASKLOOP_SIMD
);
6756 stmt
= build1_v (OMP_MASTER
, stmt
);
6757 gfc_add_expr_to_block (&block
, stmt
);
6759 if (op
!= EXEC_OMP_MASTER_TASKLOOP_SIMD
6760 && code
->op
!= EXEC_OMP_MASTER_TASKLOOP
)
6761 gfc_free_split_omp_clauses (code
, clausesa
);
6762 return gfc_finish_block (&block
);
6766 gfc_trans_omp_parallel_master_masked (gfc_code
*code
)
6769 tree stmt
, omp_clauses
;
6770 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
6771 bool parallel_combined
= false;
6773 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
6774 gfc_split_omp_clauses (code
, clausesa
);
6776 gfc_start_block (&block
);
6777 omp_clauses
= gfc_trans_omp_clauses (&block
,
6778 code
->op
== EXEC_OMP_PARALLEL_MASTER
6779 ? code
->ext
.omp_clauses
6780 : &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
6783 if (code
->op
== EXEC_OMP_PARALLEL_MASTER
)
6784 stmt
= gfc_trans_omp_master (code
);
6785 else if (code
->op
== EXEC_OMP_PARALLEL_MASKED
)
6786 stmt
= gfc_trans_omp_masked (code
, &clausesa
[GFC_OMP_SPLIT_MASKED
]);
6792 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
6793 op
= EXEC_OMP_MASKED_TASKLOOP
;
6795 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
6796 op
= EXEC_OMP_MASKED_TASKLOOP_SIMD
;
6798 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
6799 op
= EXEC_OMP_MASTER_TASKLOOP
;
6801 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
6802 op
= EXEC_OMP_MASTER_TASKLOOP_SIMD
;
6807 stmt
= gfc_trans_omp_master_masked_taskloop (code
, op
);
6808 parallel_combined
= true;
6810 if (TREE_CODE (stmt
) != BIND_EXPR
)
6811 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
6814 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_PARALLEL
,
6815 void_type_node
, stmt
, omp_clauses
);
6816 /* masked does have just filter clause, but during gimplification
6817 isn't represented by a gimplification omp context, so for
6818 !$omp parallel masked don't set OMP_PARALLEL_COMBINED,
6820 !$omp parallel masked
6821 !$omp taskloop simd lastprivate (x)
6823 !$omp parallel masked taskloop simd lastprivate (x) */
6824 if (parallel_combined
)
6825 OMP_PARALLEL_COMBINED (stmt
) = 1;
6826 gfc_add_expr_to_block (&block
, stmt
);
6827 if (code
->op
!= EXEC_OMP_PARALLEL_MASTER
)
6828 gfc_free_split_omp_clauses (code
, clausesa
);
6829 return gfc_finish_block (&block
);
6833 gfc_trans_omp_target_data (gfc_code
*code
)
6836 tree stmt
, omp_clauses
;
6838 gfc_start_block (&block
);
6839 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6841 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
6842 stmt
= build2_loc (gfc_get_location (&code
->loc
), OMP_TARGET_DATA
,
6843 void_type_node
, stmt
, omp_clauses
);
6844 gfc_add_expr_to_block (&block
, stmt
);
6845 return gfc_finish_block (&block
);
6849 gfc_trans_omp_target_enter_data (gfc_code
*code
)
6852 tree stmt
, omp_clauses
;
6854 gfc_start_block (&block
);
6855 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6857 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
6859 gfc_add_expr_to_block (&block
, stmt
);
6860 return gfc_finish_block (&block
);
6864 gfc_trans_omp_target_exit_data (gfc_code
*code
)
6867 tree stmt
, omp_clauses
;
6869 gfc_start_block (&block
);
6870 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6872 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
6874 gfc_add_expr_to_block (&block
, stmt
);
6875 return gfc_finish_block (&block
);
6879 gfc_trans_omp_target_update (gfc_code
*code
)
6882 tree stmt
, omp_clauses
;
6884 gfc_start_block (&block
);
6885 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
6887 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
6889 gfc_add_expr_to_block (&block
, stmt
);
6890 return gfc_finish_block (&block
);
6894 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
6896 tree res
, tmp
, stmt
;
6897 stmtblock_t block
, *pblock
= NULL
;
6898 stmtblock_t singleblock
;
6899 int saved_ompws_flags
;
6900 bool singleblock_in_progress
= false;
6901 /* True if previous gfc_code in workshare construct is not workshared. */
6902 bool prev_singleunit
;
6903 location_t loc
= gfc_get_location (&code
->loc
);
6905 code
= code
->block
->next
;
6909 gfc_start_block (&block
);
6912 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
6913 prev_singleunit
= false;
6915 /* Translate statements one by one to trees until we reach
6916 the end of the workshare construct. Adjacent gfc_codes that
6917 are a single unit of work are clustered and encapsulated in a
6918 single OMP_SINGLE construct. */
6919 for (; code
; code
= code
->next
)
6921 if (code
->here
!= 0)
6923 res
= gfc_trans_label_here (code
);
6924 gfc_add_expr_to_block (pblock
, res
);
6927 /* No dependence analysis, use for clauses with wait.
6928 If this is the last gfc_code, use default omp_clauses. */
6929 if (code
->next
== NULL
&& clauses
->nowait
)
6930 ompws_flags
|= OMPWS_NOWAIT
;
6932 /* By default, every gfc_code is a single unit of work. */
6933 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
6934 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
6943 res
= gfc_trans_assign (code
);
6946 case EXEC_POINTER_ASSIGN
:
6947 res
= gfc_trans_pointer_assign (code
);
6950 case EXEC_INIT_ASSIGN
:
6951 res
= gfc_trans_init_assign (code
);
6955 res
= gfc_trans_forall (code
);
6959 res
= gfc_trans_where (code
);
6962 case EXEC_OMP_ATOMIC
:
6963 res
= gfc_trans_omp_directive (code
);
6966 case EXEC_OMP_PARALLEL
:
6967 case EXEC_OMP_PARALLEL_DO
:
6968 case EXEC_OMP_PARALLEL_MASTER
:
6969 case EXEC_OMP_PARALLEL_SECTIONS
:
6970 case EXEC_OMP_PARALLEL_WORKSHARE
:
6971 case EXEC_OMP_CRITICAL
:
6972 saved_ompws_flags
= ompws_flags
;
6974 res
= gfc_trans_omp_directive (code
);
6975 ompws_flags
= saved_ompws_flags
;
6979 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
6982 gfc_set_backend_locus (&code
->loc
);
6984 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
6986 if (prev_singleunit
)
6988 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
6989 /* Add current gfc_code to single block. */
6990 gfc_add_expr_to_block (&singleblock
, res
);
6993 /* Finish single block and add it to pblock. */
6994 tmp
= gfc_finish_block (&singleblock
);
6995 tmp
= build2_loc (loc
, OMP_SINGLE
,
6996 void_type_node
, tmp
, NULL_TREE
);
6997 gfc_add_expr_to_block (pblock
, tmp
);
6998 /* Add current gfc_code to pblock. */
6999 gfc_add_expr_to_block (pblock
, res
);
7000 singleblock_in_progress
= false;
7005 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
7007 /* Start single block. */
7008 gfc_init_block (&singleblock
);
7009 gfc_add_expr_to_block (&singleblock
, res
);
7010 singleblock_in_progress
= true;
7011 loc
= gfc_get_location (&code
->loc
);
7014 /* Add the new statement to the block. */
7015 gfc_add_expr_to_block (pblock
, res
);
7017 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
7021 /* Finish remaining SINGLE block, if we were in the middle of one. */
7022 if (singleblock_in_progress
)
7024 /* Finish single block and add it to pblock. */
7025 tmp
= gfc_finish_block (&singleblock
);
7026 tmp
= build2_loc (loc
, OMP_SINGLE
, void_type_node
, tmp
,
7028 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
7030 gfc_add_expr_to_block (pblock
, tmp
);
7033 stmt
= gfc_finish_block (pblock
);
7034 if (TREE_CODE (stmt
) != BIND_EXPR
)
7036 if (!IS_EMPTY_STMT (stmt
))
7038 tree bindblock
= poplevel (1, 0);
7039 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
7047 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
7048 stmt
= gfc_trans_omp_barrier ();
7055 gfc_trans_oacc_declare (gfc_code
*code
)
7058 tree stmt
, oacc_clauses
;
7059 enum tree_code construct_code
;
7061 construct_code
= OACC_DATA
;
7063 gfc_start_block (&block
);
7065 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
7066 code
->loc
, false, true);
7067 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
7068 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
7070 gfc_add_expr_to_block (&block
, stmt
);
7072 return gfc_finish_block (&block
);
7076 gfc_trans_oacc_directive (gfc_code
*code
)
7080 case EXEC_OACC_PARALLEL_LOOP
:
7081 case EXEC_OACC_KERNELS_LOOP
:
7082 case EXEC_OACC_SERIAL_LOOP
:
7083 return gfc_trans_oacc_combined_directive (code
);
7084 case EXEC_OACC_PARALLEL
:
7085 case EXEC_OACC_KERNELS
:
7086 case EXEC_OACC_SERIAL
:
7087 case EXEC_OACC_DATA
:
7088 case EXEC_OACC_HOST_DATA
:
7089 return gfc_trans_oacc_construct (code
);
7090 case EXEC_OACC_LOOP
:
7091 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
7093 case EXEC_OACC_UPDATE
:
7094 case EXEC_OACC_CACHE
:
7095 case EXEC_OACC_ENTER_DATA
:
7096 case EXEC_OACC_EXIT_DATA
:
7097 return gfc_trans_oacc_executable_directive (code
);
7098 case EXEC_OACC_WAIT
:
7099 return gfc_trans_oacc_wait_directive (code
);
7100 case EXEC_OACC_ATOMIC
:
7101 return gfc_trans_omp_atomic (code
);
7102 case EXEC_OACC_DECLARE
:
7103 return gfc_trans_oacc_declare (code
);
7110 gfc_trans_omp_directive (gfc_code
*code
)
7114 case EXEC_OMP_ATOMIC
:
7115 return gfc_trans_omp_atomic (code
);
7116 case EXEC_OMP_BARRIER
:
7117 return gfc_trans_omp_barrier ();
7118 case EXEC_OMP_CANCEL
:
7119 return gfc_trans_omp_cancel (code
);
7120 case EXEC_OMP_CANCELLATION_POINT
:
7121 return gfc_trans_omp_cancellation_point (code
);
7122 case EXEC_OMP_CRITICAL
:
7123 return gfc_trans_omp_critical (code
);
7124 case EXEC_OMP_DEPOBJ
:
7125 return gfc_trans_omp_depobj (code
);
7126 case EXEC_OMP_DISTRIBUTE
:
7130 case EXEC_OMP_TASKLOOP
:
7131 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
7133 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
7134 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
7135 case EXEC_OMP_DISTRIBUTE_SIMD
:
7136 return gfc_trans_omp_distribute (code
, NULL
);
7137 case EXEC_OMP_DO_SIMD
:
7138 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
7139 case EXEC_OMP_ERROR
:
7140 return gfc_trans_omp_error (code
);
7141 case EXEC_OMP_FLUSH
:
7142 return gfc_trans_omp_flush (code
);
7143 case EXEC_OMP_MASKED
:
7144 return gfc_trans_omp_masked (code
, NULL
);
7145 case EXEC_OMP_MASTER
:
7146 return gfc_trans_omp_master (code
);
7147 case EXEC_OMP_MASKED_TASKLOOP
:
7148 case EXEC_OMP_MASKED_TASKLOOP_SIMD
:
7149 case EXEC_OMP_MASTER_TASKLOOP
:
7150 case EXEC_OMP_MASTER_TASKLOOP_SIMD
:
7151 return gfc_trans_omp_master_masked_taskloop (code
, code
->op
);
7152 case EXEC_OMP_ORDERED
:
7153 return gfc_trans_omp_ordered (code
);
7154 case EXEC_OMP_PARALLEL
:
7155 return gfc_trans_omp_parallel (code
);
7156 case EXEC_OMP_PARALLEL_DO
:
7157 return gfc_trans_omp_parallel_do (code
, false, NULL
, NULL
);
7158 case EXEC_OMP_PARALLEL_LOOP
:
7159 return gfc_trans_omp_parallel_do (code
, true, NULL
, NULL
);
7160 case EXEC_OMP_PARALLEL_DO_SIMD
:
7161 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
7162 case EXEC_OMP_PARALLEL_MASKED
:
7163 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP
:
7164 case EXEC_OMP_PARALLEL_MASKED_TASKLOOP_SIMD
:
7165 case EXEC_OMP_PARALLEL_MASTER
:
7166 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP
:
7167 case EXEC_OMP_PARALLEL_MASTER_TASKLOOP_SIMD
:
7168 return gfc_trans_omp_parallel_master_masked (code
);
7169 case EXEC_OMP_PARALLEL_SECTIONS
:
7170 return gfc_trans_omp_parallel_sections (code
);
7171 case EXEC_OMP_PARALLEL_WORKSHARE
:
7172 return gfc_trans_omp_parallel_workshare (code
);
7173 case EXEC_OMP_SCOPE
:
7174 return gfc_trans_omp_scope (code
);
7175 case EXEC_OMP_SECTIONS
:
7176 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
7177 case EXEC_OMP_SINGLE
:
7178 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
7179 case EXEC_OMP_TARGET
:
7180 case EXEC_OMP_TARGET_PARALLEL
:
7181 case EXEC_OMP_TARGET_PARALLEL_DO
:
7182 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
7183 case EXEC_OMP_TARGET_PARALLEL_LOOP
:
7184 case EXEC_OMP_TARGET_SIMD
:
7185 case EXEC_OMP_TARGET_TEAMS
:
7186 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
7187 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7188 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7189 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
7190 case EXEC_OMP_TARGET_TEAMS_LOOP
:
7191 return gfc_trans_omp_target (code
);
7192 case EXEC_OMP_TARGET_DATA
:
7193 return gfc_trans_omp_target_data (code
);
7194 case EXEC_OMP_TARGET_ENTER_DATA
:
7195 return gfc_trans_omp_target_enter_data (code
);
7196 case EXEC_OMP_TARGET_EXIT_DATA
:
7197 return gfc_trans_omp_target_exit_data (code
);
7198 case EXEC_OMP_TARGET_UPDATE
:
7199 return gfc_trans_omp_target_update (code
);
7201 return gfc_trans_omp_task (code
);
7202 case EXEC_OMP_TASKGROUP
:
7203 return gfc_trans_omp_taskgroup (code
);
7204 case EXEC_OMP_TASKLOOP_SIMD
:
7205 return gfc_trans_omp_taskloop (code
, code
->op
);
7206 case EXEC_OMP_TASKWAIT
:
7207 return gfc_trans_omp_taskwait (code
);
7208 case EXEC_OMP_TASKYIELD
:
7209 return gfc_trans_omp_taskyield ();
7210 case EXEC_OMP_TEAMS
:
7211 case EXEC_OMP_TEAMS_DISTRIBUTE
:
7212 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
7213 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
7214 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
7215 case EXEC_OMP_TEAMS_LOOP
:
7216 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
7217 case EXEC_OMP_WORKSHARE
:
7218 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
7225 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
7230 gfc_omp_declare_simd
*ods
;
7231 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
7233 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
7234 tree fndecl
= ns
->proc_name
->backend_decl
;
7236 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
7237 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
7238 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
7239 DECL_ATTRIBUTES (fndecl
) = c
;