/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2024 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
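
/* An illustrative sketch of the overall transformation (schematic only;
   the exact record layout depends on use_pointer_for_field below):

     #pragma omp parallel shared(x)
       x++;

   conceptually becomes a per-construct record plus an outlined function,

     struct .omp_data_s { int *x; };
     static void main._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     { (*.omp_data_i->x)++; }

   which pass_expand_omp finally wires to the libgomp entry point:

     GOMP_parallel (main._omp_fn.0, &.omp_data_o, 0, 0);  */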
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap make_addressable_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
static bool omp_maybe_offloaded_ctx (omp_context *ctx);
#define WALK_SUBSTMTS \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_ASSUME: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
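
/* A sketch of how WALK_SUBSTMTS is used by the walk_gimple_seq callbacks
   later in this file: the case labels splice into a switch on the gimple
   code inside a callback that receives HANDLED_OPS_P, so that wrapper
   statements recurse instead of being handled themselves:

     switch (gimple_code (stmt))
       {
       WALK_SUBSTMTS;
       case GIMPLE_OMP_FOR:
	 ... handle the construct ...
	 break;
       default:
	 break;
       }  */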
/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}

/* Return true if STMT corresponds to an OpenMP target region.  */

static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
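
/* A schematic example of the situation handled above: in a C++ member
   function,

     struct S { int m; void f () { #pragma omp parallel private (m) ... } };

   the front end privatizes the data member M through an artificial
   VAR_DECL whose DECL_VALUE_EXPR is this->m; the walk above strips the
   COMPONENT_REF down to the artificial "this" PARM_DECL.  */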
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!make_addressable_vars)
		make_addressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
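
/* Summarizing the decision above: aggregates, atomics, addressable
   variables and variables with value expressions are passed by pointer;
   TREE_READONLY scalars and DECL_BY_REFERENCE PARM/RESULT_DECLs need
   copy-in only; other scalars may use copy-in/copy-out, except in task
   contexts and in nested regions where the variable is shared further
   out, which fall back to passing the address (marking the outer
   variable addressable if needed).  */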
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in make_addressable_vars, it wasn't
     originally addressable, but was only later made so.
     We don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((make_addressable_vars
	   && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || code == OMP_CLAUSE_ALLOCATE
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  tree tmp = ctx->sender_decl;
  if (POINTER_TYPE_P (TREE_TYPE (tmp)))
    tmp = build_fold_indirect_ref (tmp);
  return omp_build_component_ref (tmp, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
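
/* A note on the MASK bits accepted above, as inferred from the uses of
   install_var_field in this file: bit 1 installs a field into
   RECORD_TYPE/FIELD_MAP, bit 2 into SRECORD_TYPE/SFIELD_MAP, bit 4 wraps
   an array type in a double pointer, bits 8 and 16 key the splay tree by
   &DECL_UID resp. &DECL_NAME (the latter also switching the field type to
   the Fortran descriptor's data type), and bit 32 suppresses stripping
   the reference type for allocate clauses.  */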
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
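
/* A sketch of the contract, as used in this file: the body finalized
   above is the CPYFN argument that pass_expand_omp later passes to
   libgomp's GOMP_task entry point, which invokes it (when non-NULL) to
   copy-construct the firstprivate data block of a deferred task.  */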
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (flexible_array_type_p (type)
			    ? build_pointer_type (type)
			    : build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;
  bool flex_array_ptr = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	/* The allocate clauses that appear on a target construct or on
	   constructs in a target region must specify an allocator expression
	   unless a requires directive with the dynamic_allocators clause
	   is present in the same compilation unit.  */
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
	    && omp_maybe_offloaded_ctx (ctx))
	  error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
		    " specify an allocator here");
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }
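
  /* At this point the ALLOCATE_MAP values have one of three shapes:
     integer_zero_node when no allocator was given, the allocator
     expression itself, or a TREE_LIST of (allocator-or-zero, align)
     when an align modifier is present.  */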
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (INDIRECT_REF_P (t)
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (INDIRECT_REF_P (t));
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
		      && lang_hooks.decls.omp_array_data (decl, true)))
		{
		  by_ref = !omp_privatize_by_reference (decl);
		  install_var_field (decl, by_ref, 3, ctx);
		}
	      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		{
		  if (INDIRECT_REF_P (decl))
		    decl = TREE_OPERAND (decl, 0);
		  install_var_field (decl, true, 3, ctx);
		}
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_privatize_by_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  /* For descriptor arrays on target: firstprivatize the data +
	     attach the pointer.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	      && is_gimple_omp_offloaded (ctx->stmt)
	      && !is_gimple_omp_oacc (ctx->stmt)
	      && lang_hooks.decls.omp_array_data (decl, true))
	    {
	      install_var_field (decl, false, 16 | 3, ctx);
	      install_var_field (decl, true, 8 | 3, ctx);
	    }
	  break;
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_privatize_by_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (INDIRECT_REF_P (decl2));
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (INDIRECT_REF_P (decl)
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  goto do_private;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_SELF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* If requested, make 'decl' addressable.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
	    {
	      gcc_checking_assert (DECL_P (decl));

	      bool decl_addressable = TREE_ADDRESSABLE (decl);
	      if (!decl_addressable)
		{
		  if (!make_addressable_vars)
		    make_addressable_vars = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
		  TREE_ADDRESSABLE (decl) = 1;
		}

	      if (dump_enabled_p ())
		{
		  location_t loc = OMP_CLAUSE_LOCATION (c);
		  const dump_user_location_t d_u_loc
		    = dump_user_location_t::from_location_t (loc);
		  /* PR100695 "Format decoder, quoting in 'dump_printf' etc."  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
		  if (!decl_addressable)
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " made addressable\n",
				     decl);
		  else
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " already made addressable\n",
				     decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
		}

	      /* Done.  */
	      OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
	    }
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (!flex_array_ptr)
	    flex_array_ptr = lang_hooks.decls.omp_deep_mapping_p (ctx->stmt, c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  An exception is if we have a reference (to pointer):
		 in that case we should have mapped "*decl" in a previous
		 mapping instead of "decl".  Skip the assertion in that case.
		 If we had an error, we may not have attempted to sort clauses
		 properly, so avoid the test.  */
	      if (TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
		  && is_gimple_omp_offloaded (ctx->stmt)
		  && !seen_error ())
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (INDIRECT_REF_P (decl)
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			    == REFERENCE_TYPE)
			   || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			       == POINTER_TYPE)))))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && !poly_int_tree_p (DECL_SIZE (decl)))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;
	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }
1799 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1801 switch (OMP_CLAUSE_CODE (c))
1803 case OMP_CLAUSE_LASTPRIVATE:
1804 /* Let the corresponding firstprivate clause create
1805 the variable. */
1806 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1807 scan_array_reductions = true;
1808 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1809 break;
1810 /* FALLTHRU */
1812 case OMP_CLAUSE_FIRSTPRIVATE:
1813 case OMP_CLAUSE_PRIVATE:
1814 case OMP_CLAUSE_LINEAR:
1815 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1816 case OMP_CLAUSE_IS_DEVICE_PTR:
1817 decl = OMP_CLAUSE_DECL (c);
1818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1820 while (INDIRECT_REF_P (decl)
1821 || TREE_CODE (decl) == ARRAY_REF)
1822 decl = TREE_OPERAND (decl, 0);
1825 if (is_variable_sized (decl))
1827 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1828 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1829 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1830 && is_gimple_omp_offloaded (ctx->stmt))
1832 tree decl2 = DECL_VALUE_EXPR (decl);
1833 gcc_assert (INDIRECT_REF_P (decl2));
1834 decl2 = TREE_OPERAND (decl2, 0);
1835 gcc_assert (DECL_P (decl2));
1836 install_var_local (decl2, ctx);
1837 fixup_remapped_decl (decl2, ctx, false);
1839 install_var_local (decl, ctx);
1841 fixup_remapped_decl (decl, ctx,
1842 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1843 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1844 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1845 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1846 scan_array_reductions = true;
1847 break;
1849 case OMP_CLAUSE_REDUCTION:
1850 case OMP_CLAUSE_IN_REDUCTION:
1851 decl = OMP_CLAUSE_DECL (c);
1852 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1854 if (is_variable_sized (decl))
1855 install_var_local (decl, ctx);
1856 fixup_remapped_decl (decl, ctx, false);
1858 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1859 scan_array_reductions = true;
1860 break;
1862 case OMP_CLAUSE_TASK_REDUCTION:
1863 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1864 scan_array_reductions = true;
1865 break;
1867 case OMP_CLAUSE_SHARED:
1868 /* Ignore shared directives in a teams construct inside of
1869 a target construct. */
1870 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1871 && !is_host_teams_ctx (ctx))
1872 break;
1873 decl = OMP_CLAUSE_DECL (c);
1874 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1875 break;
1876 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1878 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1879 ctx->outer)))
1880 break;
1881 bool by_ref = use_pointer_for_field (decl, ctx);
1882 install_var_field (decl, by_ref, 11, ctx);
1883 break;
1885 fixup_remapped_decl (decl, ctx, false);
1886 break;
1888 case OMP_CLAUSE_MAP:
1889 if (!is_gimple_omp_offloaded (ctx->stmt))
1890 break;
1891 decl = OMP_CLAUSE_DECL (c);
1892 if (DECL_P (decl)
1893 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1894 && (OMP_CLAUSE_MAP_KIND (c)
1895 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1896 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1897 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1898 && varpool_node::get_create (decl)->offloadable)
1899 break;
1900 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1901 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1902 && is_omp_target (ctx->stmt)
1903 && !is_gimple_omp_offloaded (ctx->stmt))
1904 break;
1905 if (DECL_P (decl))
1907 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1908 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1909 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1910 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1912 tree new_decl = lookup_decl (decl, ctx);
1913 TREE_TYPE (new_decl)
1914 = remap_type (TREE_TYPE (decl), &ctx->cb);
1916 else if (DECL_SIZE (decl)
1917 && !poly_int_tree_p (DECL_SIZE (decl)))
1919 tree decl2 = DECL_VALUE_EXPR (decl);
1920 gcc_assert (INDIRECT_REF_P (decl2));
1921 decl2 = TREE_OPERAND (decl2, 0);
1922 gcc_assert (DECL_P (decl2));
1923 fixup_remapped_decl (decl2, ctx, false);
1924 fixup_remapped_decl (decl, ctx, true);
1926 else
1927 fixup_remapped_decl (decl, ctx, false);
1929 break;
1931 case OMP_CLAUSE_COPYPRIVATE:
1932 case OMP_CLAUSE_COPYIN:
1933 case OMP_CLAUSE_DEFAULT:
1934 case OMP_CLAUSE_IF:
1935 case OMP_CLAUSE_SELF:
1936 case OMP_CLAUSE_NUM_THREADS:
1937 case OMP_CLAUSE_NUM_TEAMS:
1938 case OMP_CLAUSE_THREAD_LIMIT:
1939 case OMP_CLAUSE_DEVICE:
1940 case OMP_CLAUSE_SCHEDULE:
1941 case OMP_CLAUSE_DIST_SCHEDULE:
1942 case OMP_CLAUSE_NOWAIT:
1943 case OMP_CLAUSE_ORDERED:
1944 case OMP_CLAUSE_COLLAPSE:
1945 case OMP_CLAUSE_UNTIED:
1946 case OMP_CLAUSE_FINAL:
1947 case OMP_CLAUSE_MERGEABLE:
1948 case OMP_CLAUSE_PROC_BIND:
1949 case OMP_CLAUSE_SAFELEN:
1950 case OMP_CLAUSE_SIMDLEN:
1951 case OMP_CLAUSE_ALIGNED:
1952 case OMP_CLAUSE_DEPEND:
1953 case OMP_CLAUSE_DETACH:
1954 case OMP_CLAUSE_ALLOCATE:
1955 case OMP_CLAUSE__LOOPTEMP_:
1956 case OMP_CLAUSE__REDUCTEMP_:
1957 case OMP_CLAUSE_TO:
1958 case OMP_CLAUSE_FROM:
1959 case OMP_CLAUSE_PRIORITY:
1960 case OMP_CLAUSE_GRAINSIZE:
1961 case OMP_CLAUSE_NUM_TASKS:
1962 case OMP_CLAUSE_THREADS:
1963 case OMP_CLAUSE_SIMD:
1964 case OMP_CLAUSE_NOGROUP:
1965 case OMP_CLAUSE_DEFAULTMAP:
1966 case OMP_CLAUSE_ORDER:
1967 case OMP_CLAUSE_BIND:
1968 case OMP_CLAUSE_USE_DEVICE_PTR:
1969 case OMP_CLAUSE_USE_DEVICE_ADDR:
1970 case OMP_CLAUSE_NONTEMPORAL:
1971 case OMP_CLAUSE_ASYNC:
1972 case OMP_CLAUSE_WAIT:
1973 case OMP_CLAUSE_NUM_GANGS:
1974 case OMP_CLAUSE_NUM_WORKERS:
1975 case OMP_CLAUSE_VECTOR_LENGTH:
1976 case OMP_CLAUSE_GANG:
1977 case OMP_CLAUSE_WORKER:
1978 case OMP_CLAUSE_VECTOR:
1979 case OMP_CLAUSE_INDEPENDENT:
1980 case OMP_CLAUSE_AUTO:
1981 case OMP_CLAUSE_SEQ:
1982 case OMP_CLAUSE_TILE:
1983 case OMP_CLAUSE__SIMT_:
1984 case OMP_CLAUSE_IF_PRESENT:
1985 case OMP_CLAUSE_FINALIZE:
1986 case OMP_CLAUSE_FILTER:
1987 case OMP_CLAUSE__CONDTEMP_:
1988 break;
1990 case OMP_CLAUSE__CACHE_:
1991 case OMP_CLAUSE_NOHOST:
1992 default:
1993 gcc_unreachable ();
1997 gcc_checking_assert (!scan_array_reductions
1998 || !is_gimple_omp_oacc (ctx->stmt));
1999 if (scan_array_reductions)
2001 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2002 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
2003 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
2004 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
2005 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2007 omp_context *rctx = ctx;
2008 if (is_omp_target (ctx->stmt))
2009 rctx = ctx->outer;
2010 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
2011 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
2013 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
2014 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
2015 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
2016 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2017 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
2018 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
2020 if (flex_array_ptr)
2022 tree field = build_range_type (size_type_node,
2023 build_int_cstu (size_type_node, 0),
2024 NULL_TREE);
2025 field = build_array_type (ptr_type_node, field);
2026 field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, field);
2027 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
2028 DECL_CONTEXT (field) = ctx->record_type;
2029 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2030 TYPE_FIELDS (ctx->record_type) = field;
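/* Roughly sketched, when flex_array_ptr is set the marshalling record
   gains an unnamed trailing member of type void *[] (an array type with
   an unbounded domain), so a variable number of pointers can be carried
   at the record's tail; the field is prepended here, and the field list
   of offload records is reversed later in scan_omp_target.  */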
2034 /* Create a new name for the omp child function. Returns an identifier. */
2036 static tree
2037 create_omp_child_function_name (bool task_copy)
2039 return clone_function_name_numbered (current_function_decl,
2040 task_copy ? "_omp_cpyfn" : "_omp_fn");
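/* For a function foo this yields names such as foo._omp_fn.0 or, for
   task copy functions, foo._omp_cpyfn.1; the trailing index is the
   clone number assigned by clone_function_name_numbered.  */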
2043 /* Return true if CTX may belong to offloaded code: either if current function
2044 is offloaded, or any enclosing context corresponds to a target region. */
2046 static bool
2047 omp_maybe_offloaded_ctx (omp_context *ctx)
2049 if (cgraph_node::get (current_function_decl)->offloadable)
2050 return true;
2051 for (; ctx; ctx = ctx->outer)
2052 if (is_gimple_omp_offloaded (ctx->stmt))
2053 return true;
2054 return false;
2057 /* Build a decl for the omp child function. It'll not contain a body
2058 yet, just the bare decl. */
2060 static void
2061 create_omp_child_function (omp_context *ctx, bool task_copy)
2063 tree decl, type, name, t;
2065 name = create_omp_child_function_name (task_copy);
2066 if (task_copy)
2067 type = build_function_type_list (void_type_node, ptr_type_node,
2068 ptr_type_node, NULL_TREE);
2069 else
2070 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2072 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2074 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2075 || !task_copy);
2076 if (!task_copy)
2077 ctx->cb.dst_fn = decl;
2078 else
2079 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2081 TREE_STATIC (decl) = 1;
2082 TREE_USED (decl) = 1;
2083 DECL_ARTIFICIAL (decl) = 1;
2084 DECL_IGNORED_P (decl) = 0;
2085 TREE_PUBLIC (decl) = 0;
2086 DECL_UNINLINABLE (decl) = 1;
2087 DECL_EXTERNAL (decl) = 0;
2088 DECL_CONTEXT (decl) = NULL_TREE;
2089 DECL_INITIAL (decl) = make_node (BLOCK);
2090 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2091 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2092 /* Remove omp declare simd attribute from the new attributes. */
2093 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2095 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2096 a = a2;
2097 a = TREE_CHAIN (a);
2098 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2099 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2100 *p = TREE_CHAIN (*p);
2101 else
2103 tree chain = TREE_CHAIN (*p);
2104 *p = copy_node (*p);
2105 p = &TREE_CHAIN (*p);
2106 *p = chain;
2109 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2110 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2111 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2112 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2113 DECL_FUNCTION_VERSIONED (decl)
2114 = DECL_FUNCTION_VERSIONED (current_function_decl);
2116 if (omp_maybe_offloaded_ctx (ctx))
2118 cgraph_node::get_create (decl)->offloadable = 1;
2119 if (ENABLE_OFFLOADING)
2120 g->have_offload = true;
2123 if (cgraph_node::get_create (decl)->offloadable)
2125 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2126 ? "omp target entrypoint"
2127 : "omp declare target");
2128 if (lookup_attribute ("omp declare target",
2129 DECL_ATTRIBUTES (current_function_decl)))
2131 if (is_gimple_omp_offloaded (ctx->stmt))
2132 DECL_ATTRIBUTES (decl)
2133 = remove_attribute ("omp declare target",
2134 copy_list (DECL_ATTRIBUTES (decl)));
2135 else
2136 target_attr = NULL;
2138 if (target_attr
2139 && is_gimple_omp_offloaded (ctx->stmt)
2140 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
2141 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
2142 NULL_TREE, DECL_ATTRIBUTES (decl));
2143 if (target_attr)
2144 DECL_ATTRIBUTES (decl)
2145 = tree_cons (get_identifier (target_attr),
2146 NULL_TREE, DECL_ATTRIBUTES (decl));
2149 t = build_decl (DECL_SOURCE_LOCATION (decl),
2150 RESULT_DECL, NULL_TREE, void_type_node);
2151 DECL_ARTIFICIAL (t) = 1;
2152 DECL_IGNORED_P (t) = 1;
2153 DECL_CONTEXT (t) = decl;
2154 DECL_RESULT (decl) = t;
2156 tree data_name = get_identifier (".omp_data_i");
2157 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2158 ptr_type_node);
2159 DECL_ARTIFICIAL (t) = 1;
2160 DECL_NAMELESS (t) = 1;
2161 DECL_ARG_TYPE (t) = ptr_type_node;
2162 DECL_CONTEXT (t) = current_function_decl;
2163 TREE_USED (t) = 1;
2164 TREE_READONLY (t) = 1;
2165 DECL_ARGUMENTS (decl) = t;
2166 if (!task_copy)
2167 ctx->receiver_decl = t;
2168 else
2170 t = build_decl (DECL_SOURCE_LOCATION (decl),
2171 PARM_DECL, get_identifier (".omp_data_o"),
2172 ptr_type_node);
2173 DECL_ARTIFICIAL (t) = 1;
2174 DECL_NAMELESS (t) = 1;
2175 DECL_ARG_TYPE (t) = ptr_type_node;
2176 DECL_CONTEXT (t) = current_function_decl;
2177 TREE_USED (t) = 1;
2178 TREE_ADDRESSABLE (t) = 1;
2179 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2180 DECL_ARGUMENTS (decl) = t;
2183 /* Allocate memory for the function structure. The call to
2184 allocate_struct_function clobbers CFUN, so we need to restore
2185 it afterward. */
2186 push_struct_function (decl);
2187 cfun->function_end_locus = gimple_location (ctx->stmt);
2188 init_tree_ssa (cfun);
2189 pop_cfun ();
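/* The decl built above corresponds roughly to

     static void foo._omp_fn.0 (void *.omp_data_i);

   or, for a task copy function,

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   i.e. a static, uninlinable helper that receives the marshalling
   record(s) by pointer.  */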
2192 /* Callback for walk_gimple_seq. Check if a combined parallel
2193 contains a gimple_omp_for_combined_into_p OMP_FOR. */
2195 tree
2196 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2197 bool *handled_ops_p,
2198 struct walk_stmt_info *wi)
2200 gimple *stmt = gsi_stmt (*gsi_p);
2202 *handled_ops_p = true;
2203 switch (gimple_code (stmt))
2205 WALK_SUBSTMTS;
2207 case GIMPLE_OMP_FOR:
2208 if (gimple_omp_for_combined_into_p (stmt)
2209 && gimple_omp_for_kind (stmt)
2210 == *(const enum gf_mask *) (wi->info))
2212 wi->info = stmt;
2213 return integer_zero_node;
2215 break;
2216 default:
2217 break;
2219 return NULL;
2222 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2224 static void
2225 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2226 omp_context *outer_ctx)
2228 struct walk_stmt_info wi;
2230 memset (&wi, 0, sizeof (wi));
2231 wi.val_only = true;
2232 wi.info = (void *) &msk;
2233 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2234 if (wi.info != (void *) &msk)
2236 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2237 struct omp_for_data fd;
2238 omp_extract_for_data (for_stmt, &fd, NULL);
2239 /* We need two temporaries with fd.loop.v type (istart/iend)
2240 and then (fd.collapse - 1) temporaries with the same
2241 type for count2 ... countN-1 vars if not constant. */
2242 size_t count = 2, i;
2243 tree type = fd.iter_type;
2244 if (fd.collapse > 1
2245 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2247 count += fd.collapse - 1;
2248 /* If there are lastprivate clauses on the inner
2249 GIMPLE_OMP_FOR, add one more temporary for the total number
2250 of iterations (product of count1 ... countN-1). */
2251 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2252 OMP_CLAUSE_LASTPRIVATE)
2253 || (msk == GF_OMP_FOR_KIND_FOR
2254 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2255 OMP_CLAUSE_LASTPRIVATE)))
2257 tree temp = create_tmp_var (type);
2258 tree c = build_omp_clause (UNKNOWN_LOCATION,
2259 OMP_CLAUSE__LOOPTEMP_);
2260 insert_decl_map (&outer_ctx->cb, temp, temp);
2261 OMP_CLAUSE_DECL (c) = temp;
2262 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2263 gimple_omp_taskreg_set_clauses (stmt, c);
2265 if (fd.non_rect
2266 && fd.last_nonrect == fd.first_nonrect + 1)
2267 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2268 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2270 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2271 tree type2 = TREE_TYPE (v);
2272 count++;
2273 for (i = 0; i < 3; i++)
2275 tree temp = create_tmp_var (type2);
2276 tree c = build_omp_clause (UNKNOWN_LOCATION,
2277 OMP_CLAUSE__LOOPTEMP_);
2278 insert_decl_map (&outer_ctx->cb, temp, temp);
2279 OMP_CLAUSE_DECL (c) = temp;
2280 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2281 gimple_omp_taskreg_set_clauses (stmt, c);
2285 for (i = 0; i < count; i++)
2287 tree temp = create_tmp_var (type);
2288 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2289 insert_decl_map (&outer_ctx->cb, temp, temp);
2290 OMP_CLAUSE_DECL (c) = temp;
2291 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2292 gimple_omp_taskreg_set_clauses (stmt, c);
2295 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2296 && omp_find_clause (gimple_omp_task_clauses (stmt),
2297 OMP_CLAUSE_REDUCTION))
2299 tree type = build_pointer_type (pointer_sized_int_node);
2300 tree temp = create_tmp_var (type);
2301 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2302 insert_decl_map (&outer_ctx->cb, temp, temp);
2303 OMP_CLAUSE_DECL (c) = temp;
2304 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2305 gimple_omp_task_set_clauses (stmt, c);
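/* As an illustration, for a combined construct such as

     #pragma omp parallel for collapse(2) lastprivate(x)

   with a non-constant iteration count, the code above adds _looptemp_
   clauses for the istart/iend pair, (collapse - 1) extra counts, and
   one more temporary for the total number of iterations, so that the
   expansion of the inner GIMPLE_OMP_FOR can communicate loop bounds
   through the enclosing parallel.  */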
2309 /* Scan an OpenMP parallel directive. */
2311 static void
2312 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2314 omp_context *ctx;
2315 tree name;
2316 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2318 /* Ignore parallel directives with empty bodies, unless there
2319 are copyin clauses. */
2320 if (optimize > 0
2321 && empty_body_p (gimple_omp_body (stmt))
2322 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2323 OMP_CLAUSE_COPYIN) == NULL)
2325 gsi_replace (gsi, gimple_build_nop (), false);
2326 return;
2329 if (gimple_omp_parallel_combined_p (stmt))
2330 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2331 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2332 OMP_CLAUSE_REDUCTION);
2333 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2334 if (OMP_CLAUSE_REDUCTION_TASK (c))
2336 tree type = build_pointer_type (pointer_sized_int_node);
2337 tree temp = create_tmp_var (type);
2338 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2339 if (outer_ctx)
2340 insert_decl_map (&outer_ctx->cb, temp, temp);
2341 OMP_CLAUSE_DECL (c) = temp;
2342 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2343 gimple_omp_parallel_set_clauses (stmt, c);
2344 break;
2346 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2347 break;
2349 ctx = new_omp_context (stmt, outer_ctx);
2350 taskreg_contexts.safe_push (ctx);
2351 if (taskreg_nesting_level > 1)
2352 ctx->is_nested = true;
2353 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2354 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2355 name = create_tmp_var_name (".omp_data_s");
2356 name = build_decl (gimple_location (stmt),
2357 TYPE_DECL, name, ctx->record_type);
2358 DECL_ARTIFICIAL (name) = 1;
2359 DECL_NAMELESS (name) = 1;
2360 TYPE_NAME (ctx->record_type) = name;
2361 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2362 create_omp_child_function (ctx, false);
2363 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2365 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2366 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2368 if (TYPE_FIELDS (ctx->record_type) == NULL)
2369 ctx->record_type = ctx->receiver_decl = NULL;
2372 /* Scan an OpenMP task directive. */
2374 static void
2375 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2377 omp_context *ctx;
2378 tree name, t;
2379 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2381 /* Ignore task directives with empty bodies, unless they have a
2382 depend clause. */
2383 if (optimize > 0
2384 && gimple_omp_body (stmt)
2385 && empty_body_p (gimple_omp_body (stmt))
2386 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2388 gsi_replace (gsi, gimple_build_nop (), false);
2389 return;
2392 if (gimple_omp_task_taskloop_p (stmt))
2393 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2395 ctx = new_omp_context (stmt, outer_ctx);
2397 if (gimple_omp_task_taskwait_p (stmt))
2399 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2400 return;
2403 taskreg_contexts.safe_push (ctx);
2404 if (taskreg_nesting_level > 1)
2405 ctx->is_nested = true;
2406 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2407 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2408 name = create_tmp_var_name (".omp_data_s");
2409 name = build_decl (gimple_location (stmt),
2410 TYPE_DECL, name, ctx->record_type);
2411 DECL_ARTIFICIAL (name) = 1;
2412 DECL_NAMELESS (name) = 1;
2413 TYPE_NAME (ctx->record_type) = name;
2414 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2415 create_omp_child_function (ctx, false);
2416 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2418 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2420 if (ctx->srecord_type)
2422 name = create_tmp_var_name (".omp_data_a");
2423 name = build_decl (gimple_location (stmt),
2424 TYPE_DECL, name, ctx->srecord_type);
2425 DECL_ARTIFICIAL (name) = 1;
2426 DECL_NAMELESS (name) = 1;
2427 TYPE_NAME (ctx->srecord_type) = name;
2428 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2429 create_omp_child_function (ctx, true);
2432 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2434 if (TYPE_FIELDS (ctx->record_type) == NULL)
2436 ctx->record_type = ctx->receiver_decl = NULL;
2437 t = build_int_cst (long_integer_type_node, 0);
2438 gimple_omp_task_set_arg_size (stmt, t);
2439 t = build_int_cst (long_integer_type_node, 1);
2440 gimple_omp_task_set_arg_align (stmt, t);
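/* The depend exception above matters because even an empty task body
   can order sibling tasks; e.g. in

     #pragma omp task depend(out: x)
     ;

   the task must still be created so that subsequent tasks with a
   depend(in: x) clause wait for it.  */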
2444 /* Helper function for finish_taskreg_scan, called through walk_tree.
2445 If maybe_lookup_decl_in_outer_ctx returns a different tree for
2446 some variable, replace it in the expression. */
2448 static tree
2449 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2451 if (VAR_P (*tp))
2453 omp_context *ctx = (omp_context *) data;
2454 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2455 if (t != *tp)
2457 if (DECL_HAS_VALUE_EXPR_P (t))
2458 t = unshare_expr (DECL_VALUE_EXPR (t));
2459 *tp = t;
2461 *walk_subtrees = 0;
2463 else if (IS_TYPE_OR_DECL_P (*tp))
2464 *walk_subtrees = 0;
2465 return NULL_TREE;
2468 /* If any decls have been made addressable during scan_omp,
2469 adjust their fields if needed, and layout record types
2470 of parallel/task constructs. */
2472 static void
2473 finish_taskreg_scan (omp_context *ctx)
2475 if (ctx->record_type == NULL_TREE)
2476 return;
2478 /* If any variables were made addressable during scan_omp, check all
2479 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2480 statements to see whether use_pointer_for_field has changed
2481 because of that; if it has, update the field types now. */
2482 if (make_addressable_vars)
2484 tree c;
2486 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2487 c; c = OMP_CLAUSE_CHAIN (c))
2488 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2489 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2491 tree decl = OMP_CLAUSE_DECL (c);
2493 /* Global variables don't need to be copied,
2494 the receiver side will use them directly. */
2495 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2496 continue;
2497 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2498 || !use_pointer_for_field (decl, ctx))
2499 continue;
2500 tree field = lookup_field (decl, ctx);
2501 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2502 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2503 continue;
2504 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2505 TREE_THIS_VOLATILE (field) = 0;
2506 DECL_USER_ALIGN (field) = 0;
2507 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2508 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2509 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2510 if (ctx->srecord_type)
2512 tree sfield = lookup_sfield (decl, ctx);
2513 TREE_TYPE (sfield) = TREE_TYPE (field);
2514 TREE_THIS_VOLATILE (sfield) = 0;
2515 DECL_USER_ALIGN (sfield) = 0;
2516 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2517 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2518 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2523 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2525 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2526 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2527 if (c)
2529 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2530 expects to find it at the start of data. */
2531 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2532 tree *p = &TYPE_FIELDS (ctx->record_type);
2533 while (*p)
2534 if (*p == f)
2536 *p = DECL_CHAIN (*p);
2537 break;
2539 else
2540 p = &DECL_CHAIN (*p);
2541 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2542 TYPE_FIELDS (ctx->record_type) = f;
2544 layout_type (ctx->record_type);
2545 fixup_child_record_type (ctx);
2547 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2549 layout_type (ctx->record_type);
2550 fixup_child_record_type (ctx);
2552 else
2554 location_t loc = gimple_location (ctx->stmt);
2555 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2556 tree detach_clause
2557 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2558 OMP_CLAUSE_DETACH);
2559 /* Move VLA fields to the end. */
2560 p = &TYPE_FIELDS (ctx->record_type);
2561 while (*p)
2562 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2563 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2565 *q = *p;
2566 *p = TREE_CHAIN (*p);
2567 TREE_CHAIN (*q) = NULL_TREE;
2568 q = &TREE_CHAIN (*q);
2570 else
2571 p = &DECL_CHAIN (*p);
2572 *p = vla_fields;
2573 if (gimple_omp_task_taskloop_p (ctx->stmt))
2575 /* Move the fields corresponding to the first and second _looptemp_
2576 clauses first. These are filled in by GOMP_taskloop
2577 and thus need to be in specific positions. */
2578 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2579 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2580 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2581 OMP_CLAUSE__LOOPTEMP_);
2582 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2583 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2584 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2585 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2586 p = &TYPE_FIELDS (ctx->record_type);
2587 while (*p)
2588 if (*p == f1 || *p == f2 || *p == f3)
2589 *p = DECL_CHAIN (*p);
2590 else
2591 p = &DECL_CHAIN (*p);
2592 DECL_CHAIN (f1) = f2;
2593 if (c3)
2595 DECL_CHAIN (f2) = f3;
2596 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2598 else
2599 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2600 TYPE_FIELDS (ctx->record_type) = f1;
2601 if (ctx->srecord_type)
2603 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2604 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2605 if (c3)
2606 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2607 p = &TYPE_FIELDS (ctx->srecord_type);
2608 while (*p)
2609 if (*p == f1 || *p == f2 || *p == f3)
2610 *p = DECL_CHAIN (*p);
2611 else
2612 p = &DECL_CHAIN (*p);
2613 DECL_CHAIN (f1) = f2;
2614 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2615 if (c3)
2617 DECL_CHAIN (f2) = f3;
2618 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2620 else
2621 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2622 TYPE_FIELDS (ctx->srecord_type) = f1;
2625 if (detach_clause)
2627 tree c, field;
2629 /* Look for a firstprivate clause with the detach event handle. */
2630 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2631 c; c = OMP_CLAUSE_CHAIN (c))
2633 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2634 continue;
2635 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2636 == OMP_CLAUSE_DECL (detach_clause))
2637 break;
2640 gcc_assert (c);
2641 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2643 /* Move the field corresponding to the detach clause first.
2644 It is filled in by GOMP_task and needs to be in a
2645 specific position. */
2646 p = &TYPE_FIELDS (ctx->record_type);
2647 while (*p)
2648 if (*p == field)
2649 *p = DECL_CHAIN (*p);
2650 else
2651 p = &DECL_CHAIN (*p);
2652 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2653 TYPE_FIELDS (ctx->record_type) = field;
2654 if (ctx->srecord_type)
2656 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2657 p = &TYPE_FIELDS (ctx->srecord_type);
2658 while (*p)
2659 if (*p == field)
2660 *p = DECL_CHAIN (*p);
2661 else
2662 p = &DECL_CHAIN (*p);
2663 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2664 TYPE_FIELDS (ctx->srecord_type) = field;
2667 layout_type (ctx->record_type);
2668 fixup_child_record_type (ctx);
2669 if (ctx->srecord_type)
2670 layout_type (ctx->srecord_type);
2671 tree t = fold_convert_loc (loc, long_integer_type_node,
2672 TYPE_SIZE_UNIT (ctx->record_type));
2673 if (TREE_CODE (t) != INTEGER_CST)
2675 t = unshare_expr (t);
2676 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2678 gimple_omp_task_set_arg_size (ctx->stmt, t);
2679 t = build_int_cst (long_integer_type_node,
2680 TYPE_ALIGN_UNIT (ctx->record_type));
2681 gimple_omp_task_set_arg_align (ctx->stmt, t);
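/* A rough sketch of the resulting layout for a taskloop whose clauses
   include two _looptemp_ temporaries and a _reductemp_:

     struct .omp_data_s {
       <looptemp 1>;   // filled by GOMP_taskloop
       <looptemp 2>;   // filled by GOMP_taskloop
       <reductemp>;    // if present
       ... remaining fields ...
       ... VLA-sized fields, moved last ...
     };

   For a task with a detach clause, the event-handle field is moved to
   the front instead, as GOMP_task fills it.  */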
2685 /* Find the enclosing offload context. */
2687 static omp_context *
2688 enclosing_target_ctx (omp_context *ctx)
2690 for (; ctx; ctx = ctx->outer)
2691 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2692 break;
2694 return ctx;
2697 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2698 construct.
2699 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2701 static bool
2702 ctx_in_oacc_kernels_region (omp_context *ctx)
2704 for (; ctx != NULL; ctx = ctx->outer)
2706 gimple *stmt = ctx->stmt;
2707 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2708 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2709 return true;
2712 return false;
2715 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2716 (This doesn't include OpenACC 'kernels' decomposed parts.)
2717 Until kernels handling moves to use the same loop indirection
2718 scheme as parallel, we need to do this checking early. */
2720 static unsigned
2721 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2723 bool checking = true;
2724 unsigned outer_mask = 0;
2725 unsigned this_mask = 0;
2726 bool has_seq = false, has_auto = false;
2728 if (ctx->outer)
2729 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2730 if (!stmt)
2732 checking = false;
2733 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2734 return outer_mask;
2735 stmt = as_a <gomp_for *> (ctx->stmt);
2738 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2740 switch (OMP_CLAUSE_CODE (c))
2742 case OMP_CLAUSE_GANG:
2743 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2744 break;
2745 case OMP_CLAUSE_WORKER:
2746 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2747 break;
2748 case OMP_CLAUSE_VECTOR:
2749 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2750 break;
2751 case OMP_CLAUSE_SEQ:
2752 has_seq = true;
2753 break;
2754 case OMP_CLAUSE_AUTO:
2755 has_auto = true;
2756 break;
2757 default:
2758 break;
2762 if (checking)
2764 if (has_seq && (this_mask || has_auto))
2765 error_at (gimple_location (stmt), "%<seq%> overrides other"
2766 " OpenACC loop specifiers");
2767 else if (has_auto && this_mask)
2768 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2769 " OpenACC loop specifiers");
2771 if (this_mask & outer_mask)
2772 error_at (gimple_location (stmt), "inner loop uses same"
2773 " OpenACC parallelism as containing loop");
2776 return outer_mask | this_mask;
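/* An example of a nesting the checking above rejects:

     #pragma acc kernels
     #pragma acc loop gang
     for (...)
       {
     #pragma acc loop gang   // error: inner loop uses same OpenACC
         for (...)           // parallelism as containing loop
           ...
       }
*/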
2779 /* Scan a GIMPLE_OMP_FOR. */
2781 static omp_context *
2782 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2784 omp_context *ctx;
2785 size_t i;
2786 tree clauses = gimple_omp_for_clauses (stmt);
2788 ctx = new_omp_context (stmt, outer_ctx);
2790 if (is_gimple_omp_oacc (stmt))
2792 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2794 if (!(tgt && is_oacc_kernels (tgt)))
2795 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2797 tree c_op0;
2798 switch (OMP_CLAUSE_CODE (c))
2800 case OMP_CLAUSE_GANG:
2801 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2802 break;
2804 case OMP_CLAUSE_WORKER:
2805 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2806 break;
2808 case OMP_CLAUSE_VECTOR:
2809 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2810 break;
2812 default:
2813 continue;
2816 if (c_op0)
2818 /* By construction, this is impossible for OpenACC 'kernels'
2819 decomposed parts. */
2820 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2822 error_at (OMP_CLAUSE_LOCATION (c),
2823 "argument not permitted on %qs clause",
2824 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2825 if (tgt)
2826 inform (gimple_location (tgt->stmt),
2827 "enclosing parent compute construct");
2828 else if (oacc_get_fn_attrib (current_function_decl))
2829 inform (DECL_SOURCE_LOCATION (current_function_decl),
2830 "enclosing routine");
2831 else
2832 gcc_unreachable ();
2836 if (tgt && is_oacc_kernels (tgt))
2837 check_oacc_kernel_gwv (stmt, ctx);
2839 /* Collect all variables named in reductions on this loop. Ensure
2840 that, if this loop has a reduction on some variable v, and there is
2841 a reduction on v somewhere in an outer context, then there is a
2842 reduction on v on all intervening loops as well. */
2843 tree local_reduction_clauses = NULL;
2844 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2846 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2847 local_reduction_clauses
2848 = tree_cons (NULL, c, local_reduction_clauses);
2850 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2851 ctx->outer_reduction_clauses
2852 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2853 ctx->outer->outer_reduction_clauses);
2854 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2855 tree local_iter = local_reduction_clauses;
2856 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2858 tree local_clause = TREE_VALUE (local_iter);
2859 tree local_var = OMP_CLAUSE_DECL (local_clause);
2860 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2861 bool have_outer_reduction = false;
2862 tree ctx_iter = outer_reduction_clauses;
2863 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2865 tree outer_clause = TREE_VALUE (ctx_iter);
2866 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2867 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2868 if (outer_var == local_var && outer_op != local_op)
2870 if (warning_at (OMP_CLAUSE_LOCATION (local_clause),
2871 OPT_Wopenmp, "conflicting reduction "
2872 "operations for %qE",
2873 local_var))
2874 inform (OMP_CLAUSE_LOCATION (outer_clause),
2875 "location of the previous reduction for %qE",
2876 outer_var);
2878 if (outer_var == local_var)
2880 have_outer_reduction = true;
2881 break;
2884 if (have_outer_reduction)
2886 /* There is a reduction on outer_var both on this loop and on
2887 some enclosing loop. Walk up the context tree until such a
2888 loop with a reduction on outer_var is found, and complain
2889 about all intervening loops that do not have such a
2890 reduction. */
2891 struct omp_context *curr_loop = ctx->outer;
2892 bool found = false;
2893 while (curr_loop != NULL)
2895 tree curr_iter = curr_loop->local_reduction_clauses;
2896 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2898 tree curr_clause = TREE_VALUE (curr_iter);
2899 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2900 if (curr_var == local_var)
2902 found = true;
2903 break;
2906 if (!found)
2907 warning_at (gimple_location (curr_loop->stmt), OPT_Wopenmp,
2908 "nested loop in reduction needs "
2909 "reduction clause for %qE",
2910 local_var);
2911 else
2912 break;
2913 curr_loop = curr_loop->outer;
2917 ctx->local_reduction_clauses = local_reduction_clauses;
2918 ctx->outer_reduction_clauses
2919 = chainon (unshare_expr (ctx->local_reduction_clauses),
2920 ctx->outer_reduction_clauses);
2922 if (tgt && is_oacc_kernels (tgt))
2924 /* Strip out reductions, as they are not handled yet. */
2925 tree *prev_ptr = &clauses;
2927 while (tree probe = *prev_ptr)
2929 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2931 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2932 *prev_ptr = *next_ptr;
2933 else
2934 prev_ptr = next_ptr;
2937 gimple_omp_for_set_clauses (stmt, clauses);
2941 scan_sharing_clauses (clauses, ctx);
2943 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2944 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2946 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2947 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2948 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2949 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2951 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2952 return ctx;
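/* An illustration of the reduction-consistency checking above: in

     #pragma acc parallel loop reduction(+: sum)
     for (...)
     #pragma acc loop                     // warned: nested loop in
       for (...)                          // reduction needs reduction
     #pragma acc loop reduction(+: sum)   // clause for 'sum'
         for (...)
           sum += ...;

   the middle loop lacks a reduction clause for sum even though both
   the outer and inner loops reduce it, so it is diagnosed.  */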
2955 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2957 static void
2958 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2959 omp_context *outer_ctx)
2961 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2962 gsi_replace (gsi, bind, false);
2963 gimple_seq seq = NULL;
2964 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2965 tree cond = create_tmp_var_raw (integer_type_node);
2966 DECL_CONTEXT (cond) = current_function_decl;
2967 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2968 gimple_bind_set_vars (bind, cond);
2969 gimple_call_set_lhs (g, cond);
2970 gimple_seq_add_stmt (&seq, g);
2971 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2972 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2973 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2974 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2975 gimple_seq_add_stmt (&seq, g);
2976 g = gimple_build_label (lab1);
2977 gimple_seq_add_stmt (&seq, g);
2978 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2979 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2980 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2981 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2982 gimple_omp_for_set_clauses (new_stmt, clause);
2983 gimple_seq_add_stmt (&seq, new_stmt);
2984 g = gimple_build_goto (lab3);
2985 gimple_seq_add_stmt (&seq, g);
2986 g = gimple_build_label (lab2);
2987 gimple_seq_add_stmt (&seq, g);
2988 gimple_seq_add_stmt (&seq, stmt);
2989 g = gimple_build_label (lab3);
2990 gimple_seq_add_stmt (&seq, g);
2991 gimple_bind_set_body (bind, seq);
2992 update_stmt (bind);
2993 scan_omp_for (new_stmt, outer_ctx);
2994 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
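/* A rough sketch of the bind body built above, in GIMPLE-like
   pseudocode:

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       <copy of the simd loop, with an added _simt_ clause>
       goto lab3;
     lab2:
       <original simd loop>
     lab3:

   Later folding of the IFN_GOMP_USE_SIMT call selects either the SIMT
   or the SIMD variant for the target.  */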
2997 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2998 struct walk_stmt_info *);
2999 static omp_context *maybe_lookup_ctx (gimple *);
3001 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
3002 for the scan phase loop. */
3004 static void
3005 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
3006 omp_context *outer_ctx)
3008 /* The only change between inclusive and exclusive scan will be
3009 within the first simd loop, so just use inclusive in the
3010 worksharing loop. */
3011 outer_ctx->scan_inclusive = true;
3012 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
3013 OMP_CLAUSE_DECL (c) = integer_zero_node;
3015 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
3016 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
3017 gsi_replace (gsi, input_stmt, false);
3018 gimple_seq input_body = NULL;
3019 gimple_seq_add_stmt (&input_body, stmt);
3020 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
3022 gimple_stmt_iterator input1_gsi = gsi_none ();
3023 struct walk_stmt_info wi;
3024 memset (&wi, 0, sizeof (wi));
3025 wi.val_only = true;
3026 wi.info = (void *) &input1_gsi;
3027 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
3028 gcc_assert (!gsi_end_p (input1_gsi));
3030 gimple *input_stmt1 = gsi_stmt (input1_gsi);
3031 gsi_next (&input1_gsi);
3032 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
3033 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
3034 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
3035 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3036 std::swap (input_stmt1, scan_stmt1);
3038 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
3039 gimple_omp_set_body (input_stmt1, NULL);
3041 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3042 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3044 gimple_omp_set_body (input_stmt1, input_body1);
3045 gimple_omp_set_body (scan_stmt1, NULL);
3047 gimple_stmt_iterator input2_gsi = gsi_none ();
3048 memset (&wi, 0, sizeof (wi));
3049 wi.val_only = true;
3050 wi.info = (void *) &input2_gsi;
3051 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3052 NULL, &wi);
3053 gcc_assert (!gsi_end_p (input2_gsi));
3055 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3056 gsi_next (&input2_gsi);
3057 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3058 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3059 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3060 std::swap (input_stmt2, scan_stmt2);
3062 gimple_omp_set_body (input_stmt2, NULL);
3064 gimple_omp_set_body (input_stmt, input_body);
3065 gimple_omp_set_body (scan_stmt, scan_body);
3067 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3068 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3070 ctx = new_omp_context (scan_stmt, outer_ctx);
3071 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3073 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
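/* The transformation above applies to inscan reductions, e.g.

     #pragma omp simd reduction(inscan, +: r)
     for (...)
       {
         r += a[i];
     #pragma omp scan inclusive(r)
         b[i] = r;
       }

   which is split into an input-phase loop and a scan-phase loop,
   bracketed by GIMPLE_OMP_SCAN statements.  */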
3076 /* Scan an OpenMP sections directive. */
3078 static void
3079 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3081 omp_context *ctx;
3083 ctx = new_omp_context (stmt, outer_ctx);
3084 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3085 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3088 /* Scan an OpenMP single directive. */
3090 static void
3091 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3093 omp_context *ctx;
3094 tree name;
3096 ctx = new_omp_context (stmt, outer_ctx);
3097 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3098 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3099 name = create_tmp_var_name (".omp_copy_s");
3100 name = build_decl (gimple_location (stmt),
3101 TYPE_DECL, name, ctx->record_type);
3102 TYPE_NAME (ctx->record_type) = name;
3104 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3105 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3107 if (TYPE_FIELDS (ctx->record_type) == NULL)
3108 ctx->record_type = NULL;
3109 else
3110 layout_type (ctx->record_type);
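/* The .omp_copy_s record is only kept when copyprivate clauses add
   fields to it; e.g. for

     #pragma omp single copyprivate(x)

   the record gets a field for x so that the thread executing the
   single region can broadcast its value to the other threads.  */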
3113 /* Scan a GIMPLE_OMP_TARGET. */
3115 static void
3116 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3118 omp_context *ctx;
3119 tree name;
3120 bool offloaded = is_gimple_omp_offloaded (stmt);
3121 tree clauses = gimple_omp_target_clauses (stmt);
3123 ctx = new_omp_context (stmt, outer_ctx);
3124 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3125 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3126 name = create_tmp_var_name (".omp_data_t");
3127 name = build_decl (gimple_location (stmt),
3128 TYPE_DECL, name, ctx->record_type);
3129 DECL_ARTIFICIAL (name) = 1;
3130 DECL_NAMELESS (name) = 1;
3131 TYPE_NAME (ctx->record_type) = name;
3132 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3134 if (offloaded)
3136 create_omp_child_function (ctx, false);
3137 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3140 scan_sharing_clauses (clauses, ctx);
3141 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3143 if (TYPE_FIELDS (ctx->record_type) == NULL)
3144 ctx->record_type = ctx->receiver_decl = NULL;
3145 else
3147 TYPE_FIELDS (ctx->record_type)
3148 = nreverse (TYPE_FIELDS (ctx->record_type));
3149 if (flag_checking)
3151 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3152 for (tree field = TYPE_FIELDS (ctx->record_type);
3153 field;
3154 field = DECL_CHAIN (field))
3155 gcc_assert (DECL_ALIGN (field) == align);
3157 layout_type (ctx->record_type);
3158 if (offloaded)
3159 fixup_child_record_type (ctx);
3162 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3164 error_at (gimple_location (stmt),
3165 "%<target%> construct with nested %<teams%> construct "
3166 "contains directives outside of the %<teams%> construct");
3167 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
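/* An example of the diagnostic just above:

     #pragma omp target
     {
       #pragma omp parallel   // directive outside of the teams construct
       foo ();
       #pragma omp teams
       bar ();
     }

   is rejected: a target construct with a nested teams construct must
   not contain other directives outside of that teams construct.  */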
3171 /* Scan an OpenMP teams directive. */
3173 static void
3174 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3176 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3178 if (!gimple_omp_teams_host (stmt))
3180 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3181 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3182 return;
3184 taskreg_contexts.safe_push (ctx);
3185 gcc_assert (taskreg_nesting_level == 1);
3186 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3187 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3188 tree name = create_tmp_var_name (".omp_data_s");
3189 name = build_decl (gimple_location (stmt),
3190 TYPE_DECL, name, ctx->record_type);
3191 DECL_ARTIFICIAL (name) = 1;
3192 DECL_NAMELESS (name) = 1;
3193 TYPE_NAME (ctx->record_type) = name;
3194 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3195 create_omp_child_function (ctx, false);
3196 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3198 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3199 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3201 if (TYPE_FIELDS (ctx->record_type) == NULL)
3202 ctx->record_type = ctx->receiver_decl = NULL;
3205 /* Check nesting restrictions. */
3206 static bool
3207 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3209 tree c;
3211 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3212 inside an OpenACC CTX. */
3213 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3214 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3215 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3217 else if (!(is_gimple_omp (stmt)
3218 && is_gimple_omp_oacc (stmt)))
3220 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3222 error_at (gimple_location (stmt),
3223 "non-OpenACC construct inside of OpenACC routine");
3224 return false;
3226 else
3227 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3228 if (is_gimple_omp (octx->stmt)
3229 && is_gimple_omp_oacc (octx->stmt))
3231 error_at (gimple_location (stmt),
3232 "non-OpenACC construct inside of OpenACC region");
3233 return false;
3237 if (ctx != NULL)
3239 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3240 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3242 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3243 OMP_CLAUSE_DEVICE);
3244 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3246 error_at (gimple_location (stmt),
3247 "OpenMP constructs are not allowed in target region "
3248 "with %<ancestor%>");
3249 return false;
3252 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3253 ctx->teams_nested_p = true;
3254 else
3255 ctx->nonteams_nested_p = true;
3257 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3258 && ctx->outer
3259 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3260 ctx = ctx->outer;
3261 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3262 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3263 && !ctx->loop_p)
3265 c = NULL_TREE;
3266 if (ctx->order_concurrent
3267 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3268 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3269 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3271 error_at (gimple_location (stmt),
3272 "OpenMP constructs other than %<parallel%>, %<loop%>"
3273 " or %<simd%> may not be nested inside a region with"
3274 " the %<order(concurrent)%> clause");
3275 return false;
3277 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3279 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3280 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3282 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3283 && (ctx->outer == NULL
3284 || !gimple_omp_for_combined_into_p (ctx->stmt)
3285 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3286 || (gimple_omp_for_kind (ctx->outer->stmt)
3287 != GF_OMP_FOR_KIND_FOR)
3288 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3290 error_at (gimple_location (stmt),
3291 "%<ordered simd threads%> must be closely "
3292 "nested inside of %<%s simd%> region",
3293 lang_GNU_Fortran () ? "do" : "for");
3294 return false;
3296 return true;
3299 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3300 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3301 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3302 return true;
3303 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3304 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3305 return true;
3306 error_at (gimple_location (stmt),
3307 "OpenMP constructs other than "
3308 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3309 "not be nested inside %<simd%> region");
3310 return false;
3312 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3314 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3315 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3316 && omp_find_clause (gimple_omp_for_clauses (stmt),
3317 OMP_CLAUSE_BIND) == NULL_TREE))
3318 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3320 error_at (gimple_location (stmt),
3321 "only %<distribute%>, %<parallel%> or %<loop%> "
3322 "regions are allowed to be strictly nested inside "
3323 "%<teams%> region");
3324 return false;
3327 else if (ctx->order_concurrent
3328 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3329 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3330 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3331 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3333 if (ctx->loop_p)
3334 error_at (gimple_location (stmt),
3335 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3336 "%<simd%> may not be nested inside a %<loop%> region");
3337 else
3338 error_at (gimple_location (stmt),
3339 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3340 "%<simd%> may not be nested inside a region with "
3341 "the %<order(concurrent)%> clause");
3342 return false;
3345 switch (gimple_code (stmt))
3347 case GIMPLE_OMP_FOR:
3348 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3349 return true;
3350 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3352 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3354 error_at (gimple_location (stmt),
3355 "%<distribute%> region must be strictly nested "
3356 "inside %<teams%> construct");
3357 return false;
3359 return true;
3361 /* We split a taskloop into a task with a nested taskloop in it. */
3362 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3363 return true;
3364 /* For now, hope this will change and loop bind(parallel) will not
3365 be allowed in lots of contexts. */
3366 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3367 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3368 return true;
3369 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3371 bool ok = false;
3373 if (ctx)
3374 switch (gimple_code (ctx->stmt))
3376 case GIMPLE_OMP_FOR:
3377 ok = (gimple_omp_for_kind (ctx->stmt)
3378 == GF_OMP_FOR_KIND_OACC_LOOP);
3379 break;
3381 case GIMPLE_OMP_TARGET:
3382 switch (gimple_omp_target_kind (ctx->stmt))
3384 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3385 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3386 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3387 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3388 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3389 ok = true;
3390 break;
3392 default:
3393 break;
3396 default:
3397 break;
3399 else if (oacc_get_fn_attrib (current_function_decl))
3400 ok = true;
3401 if (!ok)
3403 error_at (gimple_location (stmt),
3404 "OpenACC loop directive must be associated with"
3405 " an OpenACC compute region");
3406 return false;
3409 /* FALLTHRU */
3410 case GIMPLE_CALL:
3411 if (is_gimple_call (stmt)
3412 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3413 == BUILT_IN_GOMP_CANCEL
3414 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3415 == BUILT_IN_GOMP_CANCELLATION_POINT))
3417 const char *bad = NULL;
3418 const char *kind = NULL;
3419 const char *construct
3420 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3421 == BUILT_IN_GOMP_CANCEL)
3422 ? "cancel"
3423 : "cancellation point";
3424 if (ctx == NULL)
3426 error_at (gimple_location (stmt), "orphaned %qs construct",
3427 construct);
3428 return false;
3430 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3431 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3432 : 0)
3434 case 1:
3435 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3436 bad = "parallel";
3437 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3438 == BUILT_IN_GOMP_CANCEL
3439 && !integer_zerop (gimple_call_arg (stmt, 1)))
3440 ctx->cancellable = true;
3441 kind = "parallel";
3442 break;
3443 case 2:
3444 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3445 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3446 bad = "for";
3447 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3448 == BUILT_IN_GOMP_CANCEL
3449 && !integer_zerop (gimple_call_arg (stmt, 1)))
3451 ctx->cancellable = true;
3452 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3453 OMP_CLAUSE_NOWAIT))
3454 warning_at (gimple_location (stmt), OPT_Wopenmp,
3455 "%<cancel for%> inside "
3456 "%<nowait%> for construct");
3457 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3458 OMP_CLAUSE_ORDERED))
3459 warning_at (gimple_location (stmt), OPT_Wopenmp,
3460 "%<cancel for%> inside "
3461 "%<ordered%> for construct");
3463 kind = "for";
3464 break;
3465 case 4:
3466 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3467 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3468 bad = "sections";
3469 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3470 == BUILT_IN_GOMP_CANCEL
3471 && !integer_zerop (gimple_call_arg (stmt, 1)))
3473 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3475 ctx->cancellable = true;
3476 if (omp_find_clause (gimple_omp_sections_clauses
3477 (ctx->stmt),
3478 OMP_CLAUSE_NOWAIT))
3479 warning_at (gimple_location (stmt), OPT_Wopenmp,
3480 "%<cancel sections%> inside "
3481 "%<nowait%> sections construct");
3483 else
3485 gcc_assert (ctx->outer
3486 && gimple_code (ctx->outer->stmt)
3487 == GIMPLE_OMP_SECTIONS);
3488 ctx->outer->cancellable = true;
3489 if (omp_find_clause (gimple_omp_sections_clauses
3490 (ctx->outer->stmt),
3491 OMP_CLAUSE_NOWAIT))
3492 warning_at (gimple_location (stmt), OPT_Wopenmp,
3493 "%<cancel sections%> inside "
3494 "%<nowait%> sections construct");
3497 kind = "sections";
3498 break;
3499 case 8:
3500 if (!is_task_ctx (ctx)
3501 && (!is_taskloop_ctx (ctx)
3502 || ctx->outer == NULL
3503 || !is_task_ctx (ctx->outer)))
3504 bad = "task";
3505 else
3507 for (omp_context *octx = ctx->outer;
3508 octx; octx = octx->outer)
3510 switch (gimple_code (octx->stmt))
3512 case GIMPLE_OMP_TASKGROUP:
3513 break;
3514 case GIMPLE_OMP_TARGET:
3515 if (gimple_omp_target_kind (octx->stmt)
3516 != GF_OMP_TARGET_KIND_REGION)
3517 continue;
3518 /* FALLTHRU */
3519 case GIMPLE_OMP_PARALLEL:
3520 case GIMPLE_OMP_TEAMS:
3521 error_at (gimple_location (stmt),
3522 "%<%s taskgroup%> construct not closely "
3523 "nested inside of %<taskgroup%> region",
3524 construct);
3525 return false;
3526 case GIMPLE_OMP_TASK:
3527 if (gimple_omp_task_taskloop_p (octx->stmt)
3528 && octx->outer
3529 && is_taskloop_ctx (octx->outer))
3531 tree clauses
3532 = gimple_omp_for_clauses (octx->outer->stmt);
3533 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3534 break;
3536 continue;
3537 default:
3538 continue;
3540 break;
3542 ctx->cancellable = true;
3544 kind = "taskgroup";
3545 break;
3546 default:
3547 error_at (gimple_location (stmt), "invalid arguments");
3548 return false;
3550 if (bad)
3552 error_at (gimple_location (stmt),
3553 "%<%s %s%> construct not closely nested inside of %qs",
3554 construct, kind, bad);
3555 return false;
3558 /* FALLTHRU */
3559 case GIMPLE_OMP_SECTIONS:
3560 case GIMPLE_OMP_SINGLE:
3561 for (; ctx != NULL; ctx = ctx->outer)
3562 switch (gimple_code (ctx->stmt))
3564 case GIMPLE_OMP_FOR:
3565 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3566 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3567 break;
3568 /* FALLTHRU */
3569 case GIMPLE_OMP_SECTIONS:
3570 case GIMPLE_OMP_SINGLE:
3571 case GIMPLE_OMP_ORDERED:
3572 case GIMPLE_OMP_MASTER:
3573 case GIMPLE_OMP_MASKED:
3574 case GIMPLE_OMP_TASK:
3575 case GIMPLE_OMP_CRITICAL:
3576 if (is_gimple_call (stmt))
3578 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3579 != BUILT_IN_GOMP_BARRIER)
3580 return true;
3581 error_at (gimple_location (stmt),
3582 "barrier region may not be closely nested inside "
3583 "of work-sharing, %<loop%>, %<critical%>, "
3584 "%<ordered%>, %<master%>, %<masked%>, explicit "
3585 "%<task%> or %<taskloop%> region");
3586 return false;
3588 error_at (gimple_location (stmt),
3589 "work-sharing region may not be closely nested inside "
3590 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3591 "%<master%>, %<masked%>, explicit %<task%> or "
3592 "%<taskloop%> region");
3593 return false;
3594 case GIMPLE_OMP_PARALLEL:
3595 case GIMPLE_OMP_TEAMS:
3596 return true;
3597 case GIMPLE_OMP_TARGET:
3598 if (gimple_omp_target_kind (ctx->stmt)
3599 == GF_OMP_TARGET_KIND_REGION)
3600 return true;
3601 break;
3602 default:
3603 break;
3605 break;
3606 case GIMPLE_OMP_MASTER:
3607 case GIMPLE_OMP_MASKED:
3608 for (; ctx != NULL; ctx = ctx->outer)
3609 switch (gimple_code (ctx->stmt))
3611 case GIMPLE_OMP_FOR:
3612 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3613 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3614 break;
3615 /* FALLTHRU */
3616 case GIMPLE_OMP_SECTIONS:
3617 case GIMPLE_OMP_SINGLE:
3618 case GIMPLE_OMP_TASK:
3619 error_at (gimple_location (stmt),
3620 "%qs region may not be closely nested inside "
3621 "of work-sharing, %<loop%>, explicit %<task%> or "
3622 "%<taskloop%> region",
3623 gimple_code (stmt) == GIMPLE_OMP_MASTER
3624 ? "master" : "masked");
3625 return false;
3626 case GIMPLE_OMP_PARALLEL:
3627 case GIMPLE_OMP_TEAMS:
3628 return true;
3629 case GIMPLE_OMP_TARGET:
3630 if (gimple_omp_target_kind (ctx->stmt)
3631 == GF_OMP_TARGET_KIND_REGION)
3632 return true;
3633 break;
3634 default:
3635 break;
3637 break;
3638 case GIMPLE_OMP_SCOPE:
3639 for (; ctx != NULL; ctx = ctx->outer)
3640 switch (gimple_code (ctx->stmt))
3642 case GIMPLE_OMP_FOR:
3643 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3644 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3645 break;
3646 /* FALLTHRU */
3647 case GIMPLE_OMP_SECTIONS:
3648 case GIMPLE_OMP_SINGLE:
3649 case GIMPLE_OMP_TASK:
3650 case GIMPLE_OMP_CRITICAL:
3651 case GIMPLE_OMP_ORDERED:
3652 case GIMPLE_OMP_MASTER:
3653 case GIMPLE_OMP_MASKED:
3654 error_at (gimple_location (stmt),
3655 "%<scope%> region may not be closely nested inside "
3656 "of work-sharing, %<loop%>, explicit %<task%>, "
3657 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3658 "or %<masked%> region");
3659 return false;
3660 case GIMPLE_OMP_PARALLEL:
3661 case GIMPLE_OMP_TEAMS:
3662 return true;
3663 case GIMPLE_OMP_TARGET:
3664 if (gimple_omp_target_kind (ctx->stmt)
3665 == GF_OMP_TARGET_KIND_REGION)
3666 return true;
3667 break;
3668 default:
3669 break;
3671 break;
3672 case GIMPLE_OMP_TASK:
3673 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3674 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3676 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3677 error_at (OMP_CLAUSE_LOCATION (c),
3678 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3679 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
3680 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3681 return false;
3683 break;
3684 case GIMPLE_OMP_ORDERED:
3685 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3686 c; c = OMP_CLAUSE_CHAIN (c))
3688 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
3690 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
3692 error_at (OMP_CLAUSE_LOCATION (c),
3693 "invalid depend kind in omp %<ordered%> %<depend%>");
3694 return false;
3696 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3697 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3698 continue;
3701 tree oclause;
3702 /* Look for containing ordered(N) loop. */
3703 if (ctx == NULL
3704 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3705 || (oclause
3706 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3707 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3709 error_at (OMP_CLAUSE_LOCATION (c),
3710 "%<ordered%> construct with %<depend%> clause "
3711 "must be closely nested inside an %<ordered%> loop");
3712 return false;
3715 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3716 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3718 /* ordered simd must be closely nested inside of a simd region,
3719 and a simd region must not encounter constructs other than
3720 ordered simd; therefore ordered simd may be either orphaned,
3721 or ctx->stmt must be simd. The latter case has already been
3722 handled earlier. */
3723 if (ctx != NULL)
3725 error_at (gimple_location (stmt),
3726 "%<ordered%> %<simd%> must be closely nested inside "
3727 "%<simd%> region");
3728 return false;
3731 for (; ctx != NULL; ctx = ctx->outer)
3732 switch (gimple_code (ctx->stmt))
3734 case GIMPLE_OMP_CRITICAL:
3735 case GIMPLE_OMP_TASK:
3736 case GIMPLE_OMP_ORDERED:
3737 ordered_in_taskloop:
3738 error_at (gimple_location (stmt),
3739 "%<ordered%> region may not be closely nested inside "
3740 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3741 "%<taskloop%> region");
3742 return false;
3743 case GIMPLE_OMP_FOR:
3744 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3745 goto ordered_in_taskloop;
3746 tree o;
3747 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3748 OMP_CLAUSE_ORDERED);
3749 if (o == NULL)
3751 error_at (gimple_location (stmt),
3752 "%<ordered%> region must be closely nested inside "
3753 "a loop region with an %<ordered%> clause");
3754 return false;
3756 if (!gimple_omp_ordered_standalone_p (stmt))
3758 if (OMP_CLAUSE_ORDERED_DOACROSS (o))
3760 error_at (gimple_location (stmt),
3761 "%<ordered%> construct without %<doacross%> or "
3762 "%<depend%> clauses must not have the same "
3763 "binding region as %<ordered%> construct with "
3764 "those clauses");
3765 return false;
3767 else if (OMP_CLAUSE_ORDERED_EXPR (o))
3769 tree co
3770 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3771 OMP_CLAUSE_COLLAPSE);
3772 HOST_WIDE_INT
3773 o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
3774 HOST_WIDE_INT c_n = 1;
3775 if (co)
3776 c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
3777 if (o_n != c_n)
3779 error_at (gimple_location (stmt),
3780 "%<ordered%> construct without %<doacross%> "
3781 "or %<depend%> clauses binds to loop where "
3782 "%<collapse%> argument %wd is different from "
3783 "%<ordered%> argument %wd", c_n, o_n);
3784 return false;
3788 return true;
3789 case GIMPLE_OMP_TARGET:
3790 if (gimple_omp_target_kind (ctx->stmt)
3791 != GF_OMP_TARGET_KIND_REGION)
3792 break;
3793 /* FALLTHRU */
3794 case GIMPLE_OMP_PARALLEL:
3795 case GIMPLE_OMP_TEAMS:
3796 error_at (gimple_location (stmt),
3797 "%<ordered%> region must be closely nested inside "
3798 "a loop region with an %<ordered%> clause");
3799 return false;
3800 default:
3801 break;
3803 break;
3804 case GIMPLE_OMP_CRITICAL:
3806 tree this_stmt_name
3807 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3808 for (; ctx != NULL; ctx = ctx->outer)
3809 if (gomp_critical *other_crit
3810 = dyn_cast <gomp_critical *> (ctx->stmt))
3811 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3813 error_at (gimple_location (stmt),
3814 "%<critical%> region may not be nested inside "
3815 "a %<critical%> region with the same name");
3816 return false;
3819 break;
3820 case GIMPLE_OMP_TEAMS:
3821 if (ctx == NULL)
3822 break;
3823 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3824 || (gimple_omp_target_kind (ctx->stmt)
3825 != GF_OMP_TARGET_KIND_REGION))
3827 /* Teams construct can appear either strictly nested inside of a
3828 target construct with no intervening stmts, or can be encountered
3829 only by the initial task (so it must not appear inside any OpenMP
3830 construct). */
3831 error_at (gimple_location (stmt),
3832 "%<teams%> construct must be closely nested inside of "
3833 "%<target%> construct or not nested in any OpenMP "
3834 "construct");
3835 return false;
3837 break;
3838 case GIMPLE_OMP_TARGET:
3839 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3840 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3842 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3843 error_at (OMP_CLAUSE_LOCATION (c),
3844 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3845 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3846 return false;
3848 if (is_gimple_omp_offloaded (stmt)
3849 && oacc_get_fn_attrib (cfun->decl) != NULL)
3851 error_at (gimple_location (stmt),
3852 "OpenACC region inside of OpenACC routine, nested "
3853 "parallelism not supported yet");
3854 return false;
3856 for (; ctx != NULL; ctx = ctx->outer)
3858 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3860 if (is_gimple_omp (stmt)
3861 && is_gimple_omp_oacc (stmt)
3862 && is_gimple_omp (ctx->stmt))
3864 error_at (gimple_location (stmt),
3865 "OpenACC construct inside of non-OpenACC region");
3866 return false;
3868 continue;
3871 const char *stmt_name, *ctx_stmt_name;
3872 switch (gimple_omp_target_kind (stmt))
3874 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3875 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3876 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3877 case GF_OMP_TARGET_KIND_ENTER_DATA:
3878 stmt_name = "target enter data"; break;
3879 case GF_OMP_TARGET_KIND_EXIT_DATA:
3880 stmt_name = "target exit data"; break;
3881 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3882 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3883 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3884 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3885 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3886 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3887 stmt_name = "enter data"; break;
3888 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3889 stmt_name = "exit data"; break;
3890 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3891 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3892 break;
3893 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3894 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3895 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3896 /* OpenACC 'kernels' decomposed parts. */
3897 stmt_name = "kernels"; break;
3898 default: gcc_unreachable ();
3900 switch (gimple_omp_target_kind (ctx->stmt))
3902 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3903 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3904 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3905 ctx_stmt_name = "parallel"; break;
3906 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3907 ctx_stmt_name = "kernels"; break;
3908 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3909 ctx_stmt_name = "serial"; break;
3910 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3911 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3912 ctx_stmt_name = "host_data"; break;
3913 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3914 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3915 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3916 /* OpenACC 'kernels' decomposed parts. */
3917 ctx_stmt_name = "kernels"; break;
3918 default: gcc_unreachable ();
3921 /* OpenACC/OpenMP mismatch? */
3922 if (is_gimple_omp_oacc (stmt)
3923 != is_gimple_omp_oacc (ctx->stmt))
3925 error_at (gimple_location (stmt),
3926 "%s %qs construct inside of %s %qs region",
3927 (is_gimple_omp_oacc (stmt)
3928 ? "OpenACC" : "OpenMP"), stmt_name,
3929 (is_gimple_omp_oacc (ctx->stmt)
3930 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3931 return false;
3933 if (is_gimple_omp_offloaded (ctx->stmt))
3935 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3936 if (is_gimple_omp_oacc (ctx->stmt))
3938 error_at (gimple_location (stmt),
3939 "%qs construct inside of %qs region",
3940 stmt_name, ctx_stmt_name);
3941 return false;
3943 else
3945 if ((gimple_omp_target_kind (ctx->stmt)
3946 == GF_OMP_TARGET_KIND_REGION)
3947 && (gimple_omp_target_kind (stmt)
3948 == GF_OMP_TARGET_KIND_REGION))
3950 c = omp_find_clause (gimple_omp_target_clauses (stmt),
3951 OMP_CLAUSE_DEVICE);
3952 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3953 break;
3955 warning_at (gimple_location (stmt), OPT_Wopenmp,
3956 "%qs construct inside of %qs region",
3957 stmt_name, ctx_stmt_name);
3961 break;
3962 default:
3963 break;
3965 return true;
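/* Annotation (not part of the original source): a hedged example of the
   kind of user code the nesting checks above reject -- a barrier closely
   nested inside a worksharing region (the array a is a placeholder):

     void f (int *a)
     {
     #pragma omp parallel
     #pragma omp for
       for (int i = 0; i < 64; i++)
         {
           a[i] = i;
     #pragma omp barrier   // error: barrier region may not be closely
                           // nested inside of work-sharing, ... region
         }
     }

   When check_omp_nesting_restrictions returns false, the caller
   (scan_omp_1_stmt below) replaces the offending statement with a
   GIMPLE_NOP rather than lowering it.  */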
3969 /* Helper function for scan_omp.
3971 Callback for walk_tree or operators in walk_gimple_stmt used to
3972 scan for OMP directives in TP. */
3974 static tree
3975 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3977 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3978 omp_context *ctx = (omp_context *) wi->info;
3979 tree t = *tp;
3980 tree tmp;
3982 switch (TREE_CODE (t))
3984 case VAR_DECL:
3985 case PARM_DECL:
3986 case LABEL_DECL:
3987 case RESULT_DECL:
3988 if (ctx)
3990 tmp = NULL_TREE;
3991 if (TREE_CODE (t) == VAR_DECL
3992 && (tmp = lookup_attribute ("omp allocate var",
3993 DECL_ATTRIBUTES (t))) != NULL_TREE)
3994 t = TREE_VALUE (TREE_VALUE (tmp));
3995 tree repl = remap_decl (t, &ctx->cb);
3996 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3997 if (tmp != NULL_TREE && t != repl)
3998 *tp = build_fold_addr_expr (repl);
3999 else if (tmp == NULL_TREE)
4000 *tp = repl;
4002 break;
4004 case INDIRECT_REF:
4005 case MEM_REF:
4006 if (ctx
4007 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
4008 && ((tmp = lookup_attribute ("omp allocate var",
4009 DECL_ATTRIBUTES (TREE_OPERAND (t, 0))))
4010 != NULL_TREE))
4012 tmp = TREE_VALUE (TREE_VALUE (tmp));
4013 tree repl = remap_decl (tmp, &ctx->cb);
4014 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
4015 if (tmp != repl)
4016 *tp = repl;
4017 break;
4019 gcc_fallthrough ();
4021 default:
4022 if (ctx && TYPE_P (t))
4023 *tp = remap_type (t, &ctx->cb);
4024 else if (!DECL_P (t))
4026 *walk_subtrees = 1;
4027 if (ctx)
4029 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
4030 if (tem != TREE_TYPE (t))
4032 if (TREE_CODE (t) == INTEGER_CST)
4033 *tp = wide_int_to_tree (tem, wi::to_wide (t));
4034 else
4035 TREE_TYPE (t) = tem;
4039 break;
4042 return NULL_TREE;
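/* Annotation (not in the original): INTEGER_CST nodes are shared tree
   nodes, so when scan_omp_1_op above needs to change a constant's type
   it rebuilds the constant in the remapped type via wide_int_to_tree
   instead of assigning TREE_TYPE in place, which would silently retype
   every other user of the shared node.  */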
4045 /* Return true if FNDECL is a setjmp or a longjmp. */
4047 static bool
4048 setjmp_or_longjmp_p (const_tree fndecl)
4050 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP, BUILT_IN_LONGJMP))
4051 return true;
4053 tree declname = DECL_NAME (fndecl);
4054 if (!declname
4055 || (DECL_CONTEXT (fndecl) != NULL_TREE
4056 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4057 || !TREE_PUBLIC (fndecl))
4058 return false;
4060 const char *name = IDENTIFIER_POINTER (declname);
4061 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
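/* Annotation (hedged example): besides the BUILT_IN_SETJMP and
   BUILT_IN_LONGJMP builtins, the predicate above also matches a plain
   public file-scope declaration such as

     extern int setjmp (void *);   // hypothetical prototype

   because its DECL_NAME is "setjmp", it is TREE_PUBLIC, and its
   DECL_CONTEXT is NULL or a TRANSLATION_UNIT_DECL.  This lets the
   "setjmp/longjmp inside simd construct" diagnostic below fire even
   when the header maps setjmp to an ordinary library call instead of
   the builtin.  */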
4064 /* Helper function for scan_omp.
4066 Callback for walk_gimple_stmt used to scan for OMP directives in
4067 the current statement in GSI. */
4069 static tree
4070 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4071 struct walk_stmt_info *wi)
4073 gimple *stmt = gsi_stmt (*gsi);
4074 omp_context *ctx = (omp_context *) wi->info;
4076 if (gimple_has_location (stmt))
4077 input_location = gimple_location (stmt);
4079 /* Check the nesting restrictions. */
4080 bool remove = false;
4081 if (is_gimple_omp (stmt))
4082 remove = !check_omp_nesting_restrictions (stmt, ctx);
4083 else if (is_gimple_call (stmt))
4085 tree fndecl = gimple_call_fndecl (stmt);
4086 if (fndecl)
4088 if (ctx
4089 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4090 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4091 && setjmp_or_longjmp_p (fndecl)
4092 && !ctx->loop_p)
4094 remove = true;
4095 error_at (gimple_location (stmt),
4096 "setjmp/longjmp inside %<simd%> construct");
4098 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4099 switch (DECL_FUNCTION_CODE (fndecl))
4101 case BUILT_IN_GOMP_BARRIER:
4102 case BUILT_IN_GOMP_CANCEL:
4103 case BUILT_IN_GOMP_CANCELLATION_POINT:
4104 case BUILT_IN_GOMP_TASKYIELD:
4105 case BUILT_IN_GOMP_TASKWAIT:
4106 case BUILT_IN_GOMP_TASKGROUP_START:
4107 case BUILT_IN_GOMP_TASKGROUP_END:
4108 remove = !check_omp_nesting_restrictions (stmt, ctx);
4109 break;
4110 default:
4111 break;
4113 else if (ctx)
4115 omp_context *octx = ctx;
4116 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4117 octx = ctx->outer;
4118 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4120 remove = true;
4121 error_at (gimple_location (stmt),
4122 "OpenMP runtime API call %qD in a region with "
4123 "%<order(concurrent)%> clause", fndecl);
4125 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4126 && omp_runtime_api_call (fndecl)
4127 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4128 != strlen ("omp_get_num_teams"))
4129 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4130 "omp_get_num_teams") != 0)
4131 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4132 != strlen ("omp_get_team_num"))
4133 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4134 "omp_get_team_num") != 0))
4136 remove = true;
4137 error_at (gimple_location (stmt),
4138 "OpenMP runtime API call %qD strictly nested in a "
4139 "%<teams%> region", fndecl);
4141 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4142 && (gimple_omp_target_kind (ctx->stmt)
4143 == GF_OMP_TARGET_KIND_REGION)
4144 && omp_runtime_api_call (fndecl))
4146 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4147 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4148 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4149 error_at (gimple_location (stmt),
4150 "OpenMP runtime API call %qD in a region with "
4151 "%<device(ancestor)%> clause", fndecl);
4156 if (remove)
4158 stmt = gimple_build_nop ();
4159 gsi_replace (gsi, stmt, false);
4162 *handled_ops_p = true;
4164 switch (gimple_code (stmt))
4166 case GIMPLE_OMP_PARALLEL:
4167 taskreg_nesting_level++;
4168 scan_omp_parallel (gsi, ctx);
4169 taskreg_nesting_level--;
4170 break;
4172 case GIMPLE_OMP_TASK:
4173 taskreg_nesting_level++;
4174 scan_omp_task (gsi, ctx);
4175 taskreg_nesting_level--;
4176 break;
4178 case GIMPLE_OMP_FOR:
4179 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4180 == GF_OMP_FOR_KIND_SIMD)
4181 && gimple_omp_for_combined_into_p (stmt)
4182 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4184 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4185 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4186 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4188 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4189 break;
4192 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4193 == GF_OMP_FOR_KIND_SIMD)
4194 && omp_maybe_offloaded_ctx (ctx)
4195 && omp_max_simt_vf ()
4196 && gimple_omp_for_collapse (stmt) == 1)
4197 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4198 else
4199 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4200 break;
4202 case GIMPLE_OMP_SCOPE:
4203 ctx = new_omp_context (stmt, ctx);
4204 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4205 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4206 break;
4208 case GIMPLE_OMP_SECTIONS:
4209 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4210 break;
4212 case GIMPLE_OMP_SINGLE:
4213 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4214 break;
4216 case GIMPLE_OMP_SCAN:
4217 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4219 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4220 ctx->scan_inclusive = true;
4221 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4222 ctx->scan_exclusive = true;
4224 /* FALLTHRU */
4225 case GIMPLE_OMP_SECTION:
4226 case GIMPLE_OMP_STRUCTURED_BLOCK:
4227 case GIMPLE_OMP_MASTER:
4228 case GIMPLE_OMP_ORDERED:
4229 case GIMPLE_OMP_CRITICAL:
4230 ctx = new_omp_context (stmt, ctx);
4231 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4232 break;
4234 case GIMPLE_OMP_MASKED:
4235 ctx = new_omp_context (stmt, ctx);
4236 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4237 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4238 break;
4240 case GIMPLE_OMP_TASKGROUP:
4241 ctx = new_omp_context (stmt, ctx);
4242 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4243 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4244 break;
4246 case GIMPLE_OMP_TARGET:
4247 if (is_gimple_omp_offloaded (stmt))
4249 taskreg_nesting_level++;
4250 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4251 taskreg_nesting_level--;
4253 else
4254 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4255 break;
4257 case GIMPLE_OMP_TEAMS:
4258 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4260 taskreg_nesting_level++;
4261 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4262 taskreg_nesting_level--;
4264 else
4265 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4266 break;
4268 case GIMPLE_BIND:
4270 tree var;
4272 *handled_ops_p = false;
4273 if (ctx)
4274 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4275 var ;
4276 var = DECL_CHAIN (var))
4277 insert_decl_map (&ctx->cb, var, var);
4279 break;
4280 default:
4281 *handled_ops_p = false;
4282 break;
4285 return NULL_TREE;
4289 /* Scan all the statements starting at the current statement. CTX
4290 contains context information about the OMP directives and
4291 clauses found during the scan. */
4293 static void
4294 scan_omp (gimple_seq *body_p, omp_context *ctx)
4296 location_t saved_location;
4297 struct walk_stmt_info wi;
4299 memset (&wi, 0, sizeof (wi));
4300 wi.info = ctx;
4301 wi.want_locations = true;
4303 saved_location = input_location;
4304 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4305 input_location = saved_location;
4308 /* Re-gimplification and code generation routines. */
4310 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4311 of BIND if in a method. */
4313 static void
4314 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4316 if (DECL_ARGUMENTS (current_function_decl)
4317 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4318 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4319 == POINTER_TYPE))
4321 tree vars = gimple_bind_vars (bind);
4322 for (tree *pvar = &vars; *pvar; )
4323 if (omp_member_access_dummy_var (*pvar))
4324 *pvar = DECL_CHAIN (*pvar);
4325 else
4326 pvar = &DECL_CHAIN (*pvar);
4327 gimple_bind_set_vars (bind, vars);
4331 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4332 block and its subblocks. */
4334 static void
4335 remove_member_access_dummy_vars (tree block)
4337 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4338 if (omp_member_access_dummy_var (*pvar))
4339 *pvar = DECL_CHAIN (*pvar);
4340 else
4341 pvar = &DECL_CHAIN (*pvar);
4343 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4344 remove_member_access_dummy_vars (block);
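/* Annotation (a minimal generic sketch, not part of the pass): both
   helpers above rely on the pointer-to-pointer unlink idiom, which
   removes matching nodes from a singly-linked decl chain without
   special-casing the head (should_remove is a hypothetical predicate):

     for (tree *p = &head; *p; )
       if (should_remove (*p))
         *p = DECL_CHAIN (*p);     // unlink: make the slot bypass the node
       else
         p = &DECL_CHAIN (*p);     // advance to the next chain slot
*/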
4347 /* If a context was created for STMT when it was scanned, return it. */
4349 static omp_context *
4350 maybe_lookup_ctx (gimple *stmt)
4352 splay_tree_node n;
4353 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4354 return n ? (omp_context *) n->value : NULL;
4358 /* Find the mapping for DECL in CTX or the immediately enclosing
4359 context that has a mapping for DECL.
4361 If CTX is a nested parallel directive, we may have to use the decl
4362 mappings created in CTX's parent context. Suppose that we have the
4363 following parallel nesting (variable UIDs shown for clarity):
4365 iD.1562 = 0;
4366 #omp parallel shared(iD.1562) -> outer parallel
4367 iD.1562 = iD.1562 + 1;
4369 #omp parallel shared (iD.1562) -> inner parallel
4370 iD.1562 = iD.1562 - 1;
4372 Each parallel structure will create a distinct .omp_data_s structure
4373 for copying iD.1562 in/out of the directive:
4375 outer parallel .omp_data_s.1.i -> iD.1562
4376 inner parallel .omp_data_s.2.i -> iD.1562
4378 A shared variable mapping will produce a copy-out operation before
4379 the parallel directive and a copy-in operation after it. So, in
4380 this case we would have:
4382 iD.1562 = 0;
4383 .omp_data_o.1.i = iD.1562;
4384 #omp parallel shared(iD.1562) -> outer parallel
4385 .omp_data_i.1 = &.omp_data_o.1
4386 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4388 .omp_data_o.2.i = iD.1562; -> **
4389 #omp parallel shared(iD.1562) -> inner parallel
4390 .omp_data_i.2 = &.omp_data_o.2
4391 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4394 ** This is a problem. The symbol iD.1562 cannot be referenced
4395 inside the body of the outer parallel region. But since we are
4396 emitting this copy operation while expanding the inner parallel
4397 directive, we need to access the CTX structure of the outer
4398 parallel directive to get the correct mapping:
4400 .omp_data_o.2.i = .omp_data_i.1->i
4402 Since there may be other workshare or parallel directives enclosing
4403 the parallel directive, it may be necessary to walk up the context
4404 parent chain. This is not a problem in general because nested
4405 parallelism happens only rarely. */
4407 static tree
4408 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4410 tree t;
4411 omp_context *up;
4413 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4414 t = maybe_lookup_decl (decl, up);
4416 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4418 return t ? t : decl;
4422 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4423 in outer contexts. */
4425 static tree
4426 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4428 tree t = NULL;
4429 omp_context *up;
4431 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4432 t = maybe_lookup_decl (decl, up);
4434 return t ? t : decl;
4438 /* Construct the initialization value for reduction operation OP. */
4440 tree
4441 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4443 switch (op)
4445 case PLUS_EXPR:
4446 case MINUS_EXPR:
4447 case BIT_IOR_EXPR:
4448 case BIT_XOR_EXPR:
4449 case TRUTH_OR_EXPR:
4450 case TRUTH_ORIF_EXPR:
4451 case TRUTH_XOR_EXPR:
4452 case NE_EXPR:
4453 return build_zero_cst (type);
4455 case MULT_EXPR:
4456 case TRUTH_AND_EXPR:
4457 case TRUTH_ANDIF_EXPR:
4458 case EQ_EXPR:
4459 return fold_convert_loc (loc, type, integer_one_node);
4461 case BIT_AND_EXPR:
4462 return fold_convert_loc (loc, type, integer_minus_one_node);
4464 case MAX_EXPR:
4465 if (SCALAR_FLOAT_TYPE_P (type))
4467 REAL_VALUE_TYPE min;
4468 if (HONOR_INFINITIES (type))
4469 real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
4470 else
4471 real_maxval (&min, 1, TYPE_MODE (type));
4472 return build_real (type, min);
4474 else if (POINTER_TYPE_P (type))
4476 wide_int min
4477 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4478 return wide_int_to_tree (type, min);
4480 else
4482 gcc_assert (INTEGRAL_TYPE_P (type));
4483 return TYPE_MIN_VALUE (type);
4486 case MIN_EXPR:
4487 if (SCALAR_FLOAT_TYPE_P (type))
4489 REAL_VALUE_TYPE max;
4490 if (HONOR_INFINITIES (type))
4491 max = dconstinf;
4492 else
4493 real_maxval (&max, 0, TYPE_MODE (type));
4494 return build_real (type, max);
4496 else if (POINTER_TYPE_P (type))
4498 wide_int max
4499 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4500 return wide_int_to_tree (type, max);
4502 else
4504 gcc_assert (INTEGRAL_TYPE_P (type));
4505 return TYPE_MAX_VALUE (type);
4508 default:
4509 gcc_unreachable ();
4513 /* Construct the initialization value for reduction CLAUSE. */
4515 tree
4516 omp_reduction_init (tree clause, tree type)
4518 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4519 OMP_CLAUSE_REDUCTION_CODE (clause), type);
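/* Annotation (illustrative, assuming a C user program; a and n are
   placeholders): the values built above are the identity elements of
   the respective reductions, so in

     int x = INT_MAX;
     #pragma omp parallel for reduction(min:x)
     for (int i = 0; i < n; i++)
       if (a[i] < x) x = a[i];

   each thread's private copy of x starts at TYPE_MAX_VALUE (INT_MAX
   for int), while reduction(+:...) copies start at 0 and
   reduction(*:...) copies start at 1.  */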
4522 /* Return alignment to be assumed for var in CLAUSE, which should be
4523 OMP_CLAUSE_ALIGNED. */
4525 static tree
4526 omp_clause_aligned_alignment (tree clause)
4528 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4529 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4531 /* Otherwise return the implementation-defined alignment. */
4532 unsigned int al = 1;
4533 opt_scalar_mode mode_iter;
4534 auto_vector_modes modes;
4535 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4536 static enum mode_class classes[]
4537 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4538 for (int i = 0; i < 4; i += 2)
4539 /* The for loop above dictates that we only walk through scalar classes. */
4540 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4542 scalar_mode mode = mode_iter.require ();
4543 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4544 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4545 continue;
4546 machine_mode alt_vmode;
4547 for (unsigned int j = 0; j < modes.length (); ++j)
4548 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4549 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4550 vmode = alt_vmode;
4552 tree type = lang_hooks.types.type_for_mode (mode, 1);
4553 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4554 continue;
4555 type = build_vector_type_for_mode (type, vmode);
4556 if (TYPE_MODE (type) != vmode)
4557 continue;
4558 if (TYPE_ALIGN_UNIT (type) > al)
4559 al = TYPE_ALIGN_UNIT (type);
4561 return build_int_cst (integer_type_node, al);
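/* Annotation (hedged example): with an explicit alignment, e.g.
   "aligned (p : 64)", the clause expression is returned as-is; without
   one, the loop above returns the largest unit alignment among the
   target's preferred vector modes, so on an AVX2-style target the
   result would typically be 32 (the alignment of a 256-bit vector
   type); the exact value is target-defined.  */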
4565 /* This structure is part of the interface between lower_rec_simd_input_clauses
4566 and lower_rec_input_clauses. */
4568 class omplow_simd_context {
4569 public:
4570 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4571 tree idx;
4572 tree lane;
4573 tree lastlane;
4574 vec<tree, va_heap> simt_eargs;
4575 gimple_seq simt_dlist;
4576 poly_uint64 max_vf;
4577 bool is_simt;
4580 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4581 privatization. */
4583 static bool
4584 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4585 omplow_simd_context *sctx, tree &ivar,
4586 tree &lvar, tree *rvar = NULL,
4587 tree *rvar2 = NULL)
4589 if (known_eq (sctx->max_vf, 0U))
4591 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4592 if (maybe_gt (sctx->max_vf, 1U))
4594 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4595 OMP_CLAUSE_SAFELEN);
4596 if (c)
4598 poly_uint64 safe_len;
4599 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4600 || maybe_lt (safe_len, 1U))
4601 sctx->max_vf = 1;
4602 else
4603 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4606 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4608 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4609 c = OMP_CLAUSE_CHAIN (c))
4611 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4612 continue;
4614 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4616 /* UDR reductions are not yet supported for SIMT; disable
4617 SIMT. */
4618 sctx->max_vf = 1;
4619 break;
4622 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4623 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4625 /* Doing boolean operations on non-integral types is
4626 for conformance only; it is not worth supporting this
4627 for SIMT. */
4628 sctx->max_vf = 1;
4629 break;
4633 if (maybe_gt (sctx->max_vf, 1U))
4635 sctx->idx = create_tmp_var (unsigned_type_node);
4636 sctx->lane = create_tmp_var (unsigned_type_node);
4639 if (known_eq (sctx->max_vf, 1U))
4640 return false;
4642 if (sctx->is_simt)
4644 if (is_gimple_reg (new_var))
4646 ivar = lvar = new_var;
4647 return true;
4649 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4650 ivar = lvar = create_tmp_var (type);
4651 TREE_ADDRESSABLE (ivar) = 1;
4652 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4653 NULL, DECL_ATTRIBUTES (ivar));
4654 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4655 tree clobber = build_clobber (type);
4656 gimple *g = gimple_build_assign (ivar, clobber);
4657 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4659 else
4661 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4662 tree avar = create_tmp_var_raw (atype);
4663 if (TREE_ADDRESSABLE (new_var))
4664 TREE_ADDRESSABLE (avar) = 1;
4665 DECL_ATTRIBUTES (avar)
4666 = tree_cons (get_identifier ("omp simd array"), NULL,
4667 DECL_ATTRIBUTES (avar));
4668 gimple_add_tmp_var (avar);
4669 tree iavar = avar;
4670 if (rvar && !ctx->for_simd_scan_phase)
4672 /* For inscan reductions, create another array temporary,
4673 which will hold the reduced value. */
4674 iavar = create_tmp_var_raw (atype);
4675 if (TREE_ADDRESSABLE (new_var))
4676 TREE_ADDRESSABLE (iavar) = 1;
4677 DECL_ATTRIBUTES (iavar)
4678 = tree_cons (get_identifier ("omp simd array"), NULL,
4679 tree_cons (get_identifier ("omp simd inscan"), NULL,
4680 DECL_ATTRIBUTES (iavar)));
4681 gimple_add_tmp_var (iavar);
4682 ctx->cb.decl_map->put (avar, iavar);
4683 if (sctx->lastlane == NULL_TREE)
4684 sctx->lastlane = create_tmp_var (unsigned_type_node);
4685 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4686 sctx->lastlane, NULL_TREE, NULL_TREE);
4687 TREE_THIS_NOTRAP (*rvar) = 1;
4689 if (ctx->scan_exclusive)
4691 /* And for exclusive scan yet another one, which will
4692 hold the value during the scan phase. */
4693 tree savar = create_tmp_var_raw (atype);
4694 if (TREE_ADDRESSABLE (new_var))
4695 TREE_ADDRESSABLE (savar) = 1;
4696 DECL_ATTRIBUTES (savar)
4697 = tree_cons (get_identifier ("omp simd array"), NULL,
4698 tree_cons (get_identifier ("omp simd inscan "
4699 "exclusive"), NULL,
4700 DECL_ATTRIBUTES (savar)));
4701 gimple_add_tmp_var (savar);
4702 ctx->cb.decl_map->put (iavar, savar);
4703 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4704 sctx->idx, NULL_TREE, NULL_TREE);
4705 TREE_THIS_NOTRAP (*rvar2) = 1;
4708 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4709 NULL_TREE, NULL_TREE);
4710 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4711 NULL_TREE, NULL_TREE);
4712 TREE_THIS_NOTRAP (ivar) = 1;
4713 TREE_THIS_NOTRAP (lvar) = 1;
4715 if (DECL_P (new_var))
4717 SET_DECL_VALUE_EXPR (new_var, lvar);
4718 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4720 return true;
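/* Annotation (a hedged sketch of the non-SIMT case above; MAX_VF is a
   placeholder for sctx->max_vf): a privatized scalar in a simd loop is
   turned into an element of an "omp simd array":

     float priv;                // original private variable
     // becomes:
     float priv_vf[MAX_VF];     // DECL_ATTRIBUTES: "omp simd array"
     // the loop body accesses priv_vf[idx] (IVAR) while other
     // references go through priv_vf[lane] via DECL_VALUE_EXPR (LVAR).

   The vectorizer later resizes such arrays to the real vectorization
   factor (a simplified description).  */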
4723 /* Helper function of lower_rec_input_clauses. For a reference
4724 in a simd reduction, add an underlying variable that it will reference. */
4726 static void
4727 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4729 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4730 if (TREE_CONSTANT (z))
4732 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4733 get_name (new_vard));
4734 gimple_add_tmp_var (z);
4735 TREE_ADDRESSABLE (z) = 1;
4736 z = build_fold_addr_expr_loc (loc, z);
4737 gimplify_assign (new_vard, z, ilist);
4741 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
4742 sequence code to read (type) (tskred_temp[idx]). */
4744 static tree
4745 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4746 unsigned idx)
4748 unsigned HOST_WIDE_INT sz
4749 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4750 tree r = build2 (MEM_REF, pointer_sized_int_node,
4751 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4752 idx * sz));
4753 tree v = create_tmp_var (pointer_sized_int_node);
4754 gimple *g = gimple_build_assign (v, r);
4755 gimple_seq_add_stmt (ilist, g);
4756 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4758 v = create_tmp_var (type);
4759 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4760 gimple_seq_add_stmt (ilist, g);
4762 return v;
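/* Annotation (hedged sketch): for, say, TYPE == sizetype and IDX == 1,
   task_reduction_read emits the equivalent of

     v  = MEM <pointer_sized_int_node> [tskred_temp + 1 * sizeof (void *)];
     v2 = (sizetype) v;   // emitted only when the conversion is not useless

   and returns the final temporary, i.e. it reads one pointer-sized slot
   of the task reduction descriptor and converts it to TYPE.  */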
4765 /* Lower early initialization of privatized variable NEW_VAR
4766 if it needs an allocator (has an allocate clause). */
4768 static bool
4769 lower_private_allocate (tree var, tree new_var, tree &allocator,
4770 tree &allocate_ptr, gimple_seq *ilist,
4771 omp_context *ctx, bool is_ref, tree size)
4773 if (allocator)
4774 return false;
4775 gcc_assert (allocate_ptr == NULL_TREE);
4776 if (ctx->allocate_map
4777 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4778 if (tree *allocatorp = ctx->allocate_map->get (var))
4779 allocator = *allocatorp;
4780 if (allocator == NULL_TREE)
4781 return false;
4782 if (!is_ref && omp_privatize_by_reference (var))
4784 allocator = NULL_TREE;
4785 return false;
4788 unsigned HOST_WIDE_INT ialign = 0;
4789 if (TREE_CODE (allocator) == TREE_LIST)
4791 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4792 allocator = TREE_PURPOSE (allocator);
4794 if (TREE_CODE (allocator) != INTEGER_CST)
4795 allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
4796 allocator = fold_convert (pointer_sized_int_node, allocator);
4797 if (TREE_CODE (allocator) != INTEGER_CST)
4799 tree var = create_tmp_var (TREE_TYPE (allocator));
4800 gimplify_assign (var, allocator, ilist);
4801 allocator = var;
4804 tree ptr_type, align, sz = size;
4805 if (TYPE_P (new_var))
4807 ptr_type = build_pointer_type (new_var);
4808 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4810 else if (is_ref)
4812 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4813 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4815 else
4817 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4818 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4819 if (sz == NULL_TREE)
4820 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4822 align = build_int_cst (size_type_node, ialign);
4823 if (TREE_CODE (sz) != INTEGER_CST)
4825 tree szvar = create_tmp_var (size_type_node);
4826 gimplify_assign (szvar, sz, ilist);
4827 sz = szvar;
4829 allocate_ptr = create_tmp_var (ptr_type);
4830 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4831 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4832 gimple_call_set_lhs (g, allocate_ptr);
4833 gimple_seq_add_stmt (ilist, g);
4834 if (!is_ref)
4836 tree x = build_simple_mem_ref (allocate_ptr);
4837 TREE_THIS_NOTRAP (x) = 1;
4838 SET_DECL_VALUE_EXPR (new_var, x);
4839 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4841 return true;
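/* Annotation (illustrative user-level view; the initialization of al
   is elided): for something like

     omp_allocator_handle_t al = ...;
     #pragma omp parallel private (x) allocate (allocator (al) : x)

   the helper above emits

     ptr = GOMP_alloc (align, sizeof (x), al);

   and rewrites x's DECL_VALUE_EXPR to *ptr, so subsequent references to
   x use the allocator-provided storage; the matching GOMP_free is
   emitted by the caller (see the BUILT_IN_GOMP_FREE use further
   below).  */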
4844 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4845 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4846 private variables. Initialization statements go in ILIST, while calls
4847 to destructors go in DLIST. */
4849 static void
4850 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4851 omp_context *ctx, struct omp_for_data *fd)
4853 tree c, copyin_seq, x, ptr;
4854 bool copyin_by_ref = false;
4855 bool lastprivate_firstprivate = false;
4856 bool reduction_omp_orig_ref = false;
4857 int pass;
4858 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4859 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4860 omplow_simd_context sctx = omplow_simd_context ();
4861 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4862 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4863 gimple_seq llist[4] = { };
4864 tree nonconst_simd_if = NULL_TREE;
4866 copyin_seq = NULL;
4867 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4869 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4870 with data sharing clauses referencing variable sized vars. That
4871 is unnecessarily hard to support and very unlikely to result in
4872 vectorized code anyway. */
4873 if (is_simd)
4874 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4875 switch (OMP_CLAUSE_CODE (c))
4877 case OMP_CLAUSE_LINEAR:
4878 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4879 sctx.max_vf = 1;
4880 /* FALLTHRU */
4881 case OMP_CLAUSE_PRIVATE:
4882 case OMP_CLAUSE_FIRSTPRIVATE:
4883 case OMP_CLAUSE_LASTPRIVATE:
4884 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4885 sctx.max_vf = 1;
4886 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4888 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4889 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4890 sctx.max_vf = 1;
4892 break;
4893 case OMP_CLAUSE_REDUCTION:
4894 case OMP_CLAUSE_IN_REDUCTION:
4895 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4896 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4897 sctx.max_vf = 1;
4898 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4900 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4901 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4902 sctx.max_vf = 1;
4904 break;
4905 case OMP_CLAUSE_IF:
4906 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4907 sctx.max_vf = 1;
4908 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4909 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4910 break;
4911 case OMP_CLAUSE_SIMDLEN:
4912 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4913 sctx.max_vf = 1;
4914 break;
4915 case OMP_CLAUSE__CONDTEMP_:
4916 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4917 if (sctx.is_simt)
4918 sctx.max_vf = 1;
4919 break;
4920 default:
4921 continue;
4924 /* Add a placeholder for simduid. */
4925 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4926 sctx.simt_eargs.safe_push (NULL_TREE);
4928 unsigned task_reduction_cnt = 0;
4929 unsigned task_reduction_cntorig = 0;
4930 unsigned task_reduction_cnt_full = 0;
4931 unsigned task_reduction_cntorig_full = 0;
4932 unsigned task_reduction_other_cnt = 0;
4933 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4934 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4935 /* Do all the fixed-sized types in the first pass, and the variable-sized
4936 types in the second pass. This makes sure that the scalar arguments to
4937 the variable-sized types are processed before we use them in the
4938 variable-sized operations. For task reductions we use 4 passes; in the
4939 first two we ignore them, in the third we gather arguments for the
4940 GOMP_task_reduction_remap call, and in the last pass we actually handle
4941 the task reductions. */
4942 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4943 ? 4 : 2); ++pass)
4945 if (pass == 2 && task_reduction_cnt)
4947 tskred_atype
4948 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4949 + task_reduction_cntorig);
4950 tskred_avar = create_tmp_var_raw (tskred_atype);
4951 gimple_add_tmp_var (tskred_avar);
4952 TREE_ADDRESSABLE (tskred_avar) = 1;
4953 task_reduction_cnt_full = task_reduction_cnt;
4954 task_reduction_cntorig_full = task_reduction_cntorig;
4956 else if (pass == 3 && task_reduction_cnt)
4958 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4959 gimple *g
4960 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4961 size_int (task_reduction_cntorig),
4962 build_fold_addr_expr (tskred_avar));
4963 gimple_seq_add_stmt (ilist, g);
4965 if (pass == 3 && task_reduction_other_cnt)
4967 /* For reduction clauses, build
4968 tskred_base = (void *) tskred_temp[2]
4969 + omp_get_thread_num () * tskred_temp[1]
4970 or if tskred_temp[1] is known to be constant, that constant
4971 directly. This is the start of the private reduction copy block
4972 for the current thread. */
4973 tree v = create_tmp_var (integer_type_node);
4974 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4975 gimple *g = gimple_build_call (x, 0);
4976 gimple_call_set_lhs (g, v);
4977 gimple_seq_add_stmt (ilist, g);
4978 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4979 tskred_temp = OMP_CLAUSE_DECL (c);
4980 if (is_taskreg_ctx (ctx))
4981 tskred_temp = lookup_decl (tskred_temp, ctx);
4982 tree v2 = create_tmp_var (sizetype);
4983 g = gimple_build_assign (v2, NOP_EXPR, v);
4984 gimple_seq_add_stmt (ilist, g);
4985 if (ctx->task_reductions[0])
4986 v = fold_convert (sizetype, ctx->task_reductions[0]);
4987 else
4988 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4989 tree v3 = create_tmp_var (sizetype);
4990 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4991 gimple_seq_add_stmt (ilist, g);
4992 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4993 tskred_base = create_tmp_var (ptr_type_node);
4994 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4995 gimple_seq_add_stmt (ilist, g);
4997 task_reduction_cnt = 0;
4998 task_reduction_cntorig = 0;
4999 task_reduction_other_cnt = 0;
5000 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5002 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5003 tree var, new_var;
5004 bool by_ref;
5005 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5006 bool task_reduction_p = false;
5007 bool task_reduction_needs_orig_p = false;
5008 tree cond = NULL_TREE;
5009 tree allocator, allocate_ptr;
5011 switch (c_kind)
5013 case OMP_CLAUSE_PRIVATE:
5014 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5015 continue;
5016 break;
5017 case OMP_CLAUSE_SHARED:
5018 /* Ignore shared directives in teams construct inside
5019 of target construct. */
5020 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5021 && !is_host_teams_ctx (ctx))
5022 continue;
5023 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5025 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5026 || is_global_var (OMP_CLAUSE_DECL (c)));
5027 continue;
5029 case OMP_CLAUSE_FIRSTPRIVATE:
5030 case OMP_CLAUSE_COPYIN:
5031 break;
5032 case OMP_CLAUSE_LINEAR:
5033 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5034 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5035 lastprivate_firstprivate = true;
5036 break;
5037 case OMP_CLAUSE_REDUCTION:
5038 case OMP_CLAUSE_IN_REDUCTION:
5039 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5040 || is_task_ctx (ctx)
5041 || OMP_CLAUSE_REDUCTION_TASK (c))
5043 task_reduction_p = true;
5044 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5046 task_reduction_other_cnt++;
5047 if (pass == 2)
5048 continue;
5050 else
5051 task_reduction_cnt++;
5052 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5054 var = OMP_CLAUSE_DECL (c);
5055 /* If var is a global variable that isn't privatized
5056 in outer contexts, we don't need to look up the
5057 original address; it is always the address of the
5058 global variable itself. */
5059 if (!DECL_P (var)
5060 || omp_privatize_by_reference (var)
5061 || !is_global_var
5062 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5064 task_reduction_needs_orig_p = true;
5065 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5066 task_reduction_cntorig++;
5070 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5071 reduction_omp_orig_ref = true;
5072 break;
5073 case OMP_CLAUSE__REDUCTEMP_:
5074 if (!is_taskreg_ctx (ctx))
5075 continue;
5076 /* FALLTHRU */
5077 case OMP_CLAUSE__LOOPTEMP_:
5078 /* Handle _looptemp_/_reductemp_ clauses only on
5079 parallel/task. */
5080 if (fd)
5081 continue;
5082 break;
5083 case OMP_CLAUSE_LASTPRIVATE:
5084 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5086 lastprivate_firstprivate = true;
5087 if (pass != 0 || is_taskloop_ctx (ctx))
5088 continue;
5090 /* Even without corresponding firstprivate, if
5091 decl is Fortran allocatable, it needs outer var
5092 reference. */
5093 else if (pass == 0
5094 && lang_hooks.decls.omp_private_outer_ref
5095 (OMP_CLAUSE_DECL (c)))
5096 lastprivate_firstprivate = true;
5097 break;
5098 case OMP_CLAUSE_ALIGNED:
5099 if (pass != 1)
5100 continue;
5101 var = OMP_CLAUSE_DECL (c);
5102 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5103 && !is_global_var (var))
5105 new_var = maybe_lookup_decl (var, ctx);
5106 if (new_var == NULL_TREE)
5107 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5108 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5109 tree alarg = omp_clause_aligned_alignment (c);
5110 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5111 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5112 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5113 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5114 gimplify_and_add (x, ilist);
5116 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5117 && is_global_var (var))
5119 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5120 new_var = lookup_decl (var, ctx);
5121 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5122 t = build_fold_addr_expr_loc (clause_loc, t);
5123 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5124 tree alarg = omp_clause_aligned_alignment (c);
5125 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5126 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5127 t = fold_convert_loc (clause_loc, ptype, t);
5128 x = create_tmp_var (ptype);
5129 t = build2 (MODIFY_EXPR, ptype, x, t);
5130 gimplify_and_add (t, ilist);
5131 t = build_simple_mem_ref_loc (clause_loc, x);
5132 SET_DECL_VALUE_EXPR (new_var, t);
5133 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5135 continue;
5136 case OMP_CLAUSE__CONDTEMP_:
5137 if (is_parallel_ctx (ctx)
5138 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5139 break;
5140 continue;
5141 default:
5142 continue;
5145 if (task_reduction_p != (pass >= 2))
5146 continue;
5148 allocator = NULL_TREE;
5149 allocate_ptr = NULL_TREE;
5150 new_var = var = OMP_CLAUSE_DECL (c);
5151 if ((c_kind == OMP_CLAUSE_REDUCTION
5152 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5153 && TREE_CODE (var) == MEM_REF)
5155 var = TREE_OPERAND (var, 0);
5156 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5157 var = TREE_OPERAND (var, 0);
5158 if (TREE_CODE (var) == INDIRECT_REF
5159 || TREE_CODE (var) == ADDR_EXPR)
5160 var = TREE_OPERAND (var, 0);
5161 if (is_variable_sized (var))
5163 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5164 var = DECL_VALUE_EXPR (var);
5165 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5166 var = TREE_OPERAND (var, 0);
5167 gcc_assert (DECL_P (var));
5169 new_var = var;
5171 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5173 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5174 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5176 else if (c_kind != OMP_CLAUSE_COPYIN)
5177 new_var = lookup_decl (var, ctx);
5179 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5181 if (pass != 0)
5182 continue;
5184 /* C/C++ array section reductions. */
5185 else if ((c_kind == OMP_CLAUSE_REDUCTION
5186 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5187 && var != OMP_CLAUSE_DECL (c))
5189 if (pass == 0)
5190 continue;
5192 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5193 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5195 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5197 tree b = TREE_OPERAND (orig_var, 1);
5198 if (is_omp_target (ctx->stmt))
5199 b = NULL_TREE;
5200 else
5201 b = maybe_lookup_decl (b, ctx);
5202 if (b == NULL)
5204 b = TREE_OPERAND (orig_var, 1);
5205 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5207 if (integer_zerop (bias))
5208 bias = b;
5209 else
5211 bias = fold_convert_loc (clause_loc,
5212 TREE_TYPE (b), bias);
5213 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5214 TREE_TYPE (b), b, bias);
5216 orig_var = TREE_OPERAND (orig_var, 0);
5218 if (pass == 2)
5220 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5221 if (is_global_var (out)
5222 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5223 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5224 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5225 != POINTER_TYPE)))
5226 x = var;
5227 else if (is_omp_target (ctx->stmt))
5228 x = out;
5229 else
5231 bool by_ref = use_pointer_for_field (var, NULL);
5232 x = build_receiver_ref (var, by_ref, ctx);
5233 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5234 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5235 == POINTER_TYPE))
5236 x = build_fold_addr_expr (x);
5238 if (TREE_CODE (orig_var) == INDIRECT_REF)
5239 x = build_simple_mem_ref (x);
5240 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5242 if (var == TREE_OPERAND (orig_var, 0))
5243 x = build_fold_addr_expr (x);
5245 bias = fold_convert (sizetype, bias);
5246 x = fold_convert (ptr_type_node, x);
5247 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5248 TREE_TYPE (x), x, bias);
5249 unsigned cnt = task_reduction_cnt - 1;
5250 if (!task_reduction_needs_orig_p)
5251 cnt += (task_reduction_cntorig_full
5252 - task_reduction_cntorig);
5253 else
5254 cnt = task_reduction_cntorig - 1;
5255 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5256 size_int (cnt), NULL_TREE, NULL_TREE);
5257 gimplify_assign (r, x, ilist);
5258 continue;
5261 if (TREE_CODE (orig_var) == INDIRECT_REF
5262 || TREE_CODE (orig_var) == ADDR_EXPR)
5263 orig_var = TREE_OPERAND (orig_var, 0);
5264 tree d = OMP_CLAUSE_DECL (c);
5265 tree type = TREE_TYPE (d);
5266 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5267 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5268 tree sz = v;
5269 const char *name = get_name (orig_var);
5270 if (pass != 3 && !TREE_CONSTANT (v))
5272 tree t;
5273 if (is_omp_target (ctx->stmt))
5274 t = NULL_TREE;
5275 else
5276 t = maybe_lookup_decl (v, ctx);
5277 if (t)
5278 v = t;
5279 else
5280 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5281 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5282 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5283 TREE_TYPE (v), v,
5284 build_int_cst (TREE_TYPE (v), 1));
5285 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5286 TREE_TYPE (v), t,
5287 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5289 if (pass == 3)
5291 tree xv = create_tmp_var (ptr_type_node);
5292 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5294 unsigned cnt = task_reduction_cnt - 1;
5295 if (!task_reduction_needs_orig_p)
5296 cnt += (task_reduction_cntorig_full
5297 - task_reduction_cntorig);
5298 else
5299 cnt = task_reduction_cntorig - 1;
5300 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5301 size_int (cnt), NULL_TREE, NULL_TREE);
5303 gimple *g = gimple_build_assign (xv, x);
5304 gimple_seq_add_stmt (ilist, g);
5306 else
5308 unsigned int idx = *ctx->task_reduction_map->get (c);
5309 tree off;
5310 if (ctx->task_reductions[1 + idx])
5311 off = fold_convert (sizetype,
5312 ctx->task_reductions[1 + idx]);
5313 else
5314 off = task_reduction_read (ilist, tskred_temp, sizetype,
5315 7 + 3 * idx + 1);
5316 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5317 tskred_base, off);
5318 gimple_seq_add_stmt (ilist, g);
5320 x = fold_convert (build_pointer_type (boolean_type_node),
5321 xv);
5322 if (TREE_CONSTANT (v))
5323 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5324 TYPE_SIZE_UNIT (type));
5325 else
5327 tree t;
5328 if (is_omp_target (ctx->stmt))
5329 t = NULL_TREE;
5330 else
5331 t = maybe_lookup_decl (v, ctx);
5332 if (t)
5333 v = t;
5334 else
5335 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5336 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5337 fb_rvalue);
5338 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5339 TREE_TYPE (v), v,
5340 build_int_cst (TREE_TYPE (v), 1));
5341 t = fold_build2_loc (clause_loc, MULT_EXPR,
5342 TREE_TYPE (v), t,
5343 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5344 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5346 cond = create_tmp_var (TREE_TYPE (x));
5347 gimplify_assign (cond, x, ilist);
5348 x = xv;
5350 else if (lower_private_allocate (var, type, allocator,
5351 allocate_ptr, ilist, ctx,
5352 true,
5353 TREE_CONSTANT (v)
5354 ? TYPE_SIZE_UNIT (type)
5355 : sz))
5356 x = allocate_ptr;
5357 else if (TREE_CONSTANT (v))
5359 x = create_tmp_var_raw (type, name);
5360 gimple_add_tmp_var (x);
5361 TREE_ADDRESSABLE (x) = 1;
5362 x = build_fold_addr_expr_loc (clause_loc, x);
5364 else
5366 tree atmp
5367 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5368 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5369 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5372 tree ptype = build_pointer_type (TREE_TYPE (type));
5373 x = fold_convert_loc (clause_loc, ptype, x);
5374 tree y = create_tmp_var (ptype, name);
5375 gimplify_assign (y, x, ilist);
5376 x = y;
5377 tree yb = y;
5379 if (!integer_zerop (bias))
5381 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5382 bias);
5383 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5384 y);
5385 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5386 pointer_sized_int_node, yb, bias);
5387 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5388 yb = create_tmp_var (ptype, name);
5389 gimplify_assign (yb, x, ilist);
5390 x = yb;
5393 d = TREE_OPERAND (d, 0);
5394 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5395 d = TREE_OPERAND (d, 0);
5396 if (TREE_CODE (d) == ADDR_EXPR)
5398 if (orig_var != var)
5400 gcc_assert (is_variable_sized (orig_var));
5401 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5402 x);
5403 gimplify_assign (new_var, x, ilist);
5404 tree new_orig_var = lookup_decl (orig_var, ctx);
5405 tree t = build_fold_indirect_ref (new_var);
5406 DECL_IGNORED_P (new_var) = 0;
5407 TREE_THIS_NOTRAP (t) = 1;
5408 SET_DECL_VALUE_EXPR (new_orig_var, t);
5409 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5411 else
5413 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5414 build_int_cst (ptype, 0));
5415 SET_DECL_VALUE_EXPR (new_var, x);
5416 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5419 else
5421 gcc_assert (orig_var == var);
5422 if (TREE_CODE (d) == INDIRECT_REF)
5424 x = create_tmp_var (ptype, name);
5425 TREE_ADDRESSABLE (x) = 1;
5426 gimplify_assign (x, yb, ilist);
5427 x = build_fold_addr_expr_loc (clause_loc, x);
5429 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5430 gimplify_assign (new_var, x, ilist);
5432 /* GOMP_taskgroup_reduction_register memsets the whole
5433 array to zero. If the initializer is zero, we don't
5434 need to initialize it again; just mark it as ever
5435 used unconditionally, i.e. cond = true. */
5436 if (cond
5437 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5438 && initializer_zerop (omp_reduction_init (c,
5439 TREE_TYPE (type))))
5441 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5442 boolean_true_node);
5443 gimple_seq_add_stmt (ilist, g);
5444 continue;
5446 tree end = create_artificial_label (UNKNOWN_LOCATION);
5447 if (cond)
5449 gimple *g;
5450 if (!is_parallel_ctx (ctx))
5452 tree condv = create_tmp_var (boolean_type_node);
5453 g = gimple_build_assign (condv,
5454 build_simple_mem_ref (cond));
5455 gimple_seq_add_stmt (ilist, g);
5456 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5457 g = gimple_build_cond (NE_EXPR, condv,
5458 boolean_false_node, end, lab1);
5459 gimple_seq_add_stmt (ilist, g);
5460 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5462 g = gimple_build_assign (build_simple_mem_ref (cond),
5463 boolean_true_node);
5464 gimple_seq_add_stmt (ilist, g);
5467 tree y1 = create_tmp_var (ptype);
5468 gimplify_assign (y1, y, ilist);
5469 tree i2 = NULL_TREE, y2 = NULL_TREE;
5470 tree body2 = NULL_TREE, end2 = NULL_TREE;
5471 tree y3 = NULL_TREE, y4 = NULL_TREE;
5472 if (task_reduction_needs_orig_p)
5474 y3 = create_tmp_var (ptype);
5475 tree ref;
5476 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5477 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5478 size_int (task_reduction_cnt_full
5479 + task_reduction_cntorig - 1),
5480 NULL_TREE, NULL_TREE);
5481 else
5483 unsigned int idx = *ctx->task_reduction_map->get (c);
5484 ref = task_reduction_read (ilist, tskred_temp, ptype,
5485 7 + 3 * idx);
5487 gimplify_assign (y3, ref, ilist);
5489 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5491 if (pass != 3)
5493 y2 = create_tmp_var (ptype);
5494 gimplify_assign (y2, y, ilist);
5496 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5498 tree ref = build_outer_var_ref (var, ctx);
5499 /* For references, build_outer_var_ref already performs this. */
5500 if (TREE_CODE (d) == INDIRECT_REF)
5501 gcc_assert (omp_privatize_by_reference (var));
5502 else if (TREE_CODE (d) == ADDR_EXPR)
5503 ref = build_fold_addr_expr (ref);
5504 else if (omp_privatize_by_reference (var))
5505 ref = build_fold_addr_expr (ref);
5506 ref = fold_convert_loc (clause_loc, ptype, ref);
5507 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5508 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5510 y3 = create_tmp_var (ptype);
5511 gimplify_assign (y3, unshare_expr (ref), ilist);
5513 if (is_simd)
5515 y4 = create_tmp_var (ptype);
5516 gimplify_assign (y4, ref, dlist);
5520 tree i = create_tmp_var (TREE_TYPE (v));
5521 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5522 tree body = create_artificial_label (UNKNOWN_LOCATION);
5523 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5524 if (y2)
5526 i2 = create_tmp_var (TREE_TYPE (v));
5527 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5528 body2 = create_artificial_label (UNKNOWN_LOCATION);
5529 end2 = create_artificial_label (UNKNOWN_LOCATION);
5530 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5532 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5534 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5535 tree decl_placeholder
5536 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5537 SET_DECL_VALUE_EXPR (decl_placeholder,
5538 build_simple_mem_ref (y1));
5539 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5540 SET_DECL_VALUE_EXPR (placeholder,
5541 y3 ? build_simple_mem_ref (y3)
5542 : error_mark_node);
5543 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5544 x = lang_hooks.decls.omp_clause_default_ctor
5545 (c, build_simple_mem_ref (y1),
5546 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5547 if (x)
5548 gimplify_and_add (x, ilist);
5549 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5551 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5552 lower_omp (&tseq, ctx);
5553 gimple_seq_add_seq (ilist, tseq);
5555 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5556 if (is_simd)
5558 SET_DECL_VALUE_EXPR (decl_placeholder,
5559 build_simple_mem_ref (y2));
5560 SET_DECL_VALUE_EXPR (placeholder,
5561 build_simple_mem_ref (y4));
5562 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5563 lower_omp (&tseq, ctx);
5564 gimple_seq_add_seq (dlist, tseq);
5565 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5567 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5568 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5569 if (y2)
5571 x = lang_hooks.decls.omp_clause_dtor
5572 (c, build_simple_mem_ref (y2));
5573 if (x)
5574 gimplify_and_add (x, dlist);
5577 else
5579 x = omp_reduction_init (c, TREE_TYPE (type));
5580 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5582 /* reduction(-:var) sums up the partial results, so it
5583 acts identically to reduction(+:var). */
5584 if (code == MINUS_EXPR)
5585 code = PLUS_EXPR;
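/* Worked example (sketch): with

     #pragma omp parallel for reduction (-: s)
     for (int i = 0; i < n; i++)
       s -= v[i];

   each thread accumulates a private partial starting from 0, so the
   partials themselves must be combined with '+' to yield
   s - (v[0] + ... + v[n-1]); hence MINUS_EXPR is rewritten to PLUS_EXPR
   for the merge.  */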
5587 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5588 if (is_simd)
5590 x = build2 (code, TREE_TYPE (type),
5591 build_simple_mem_ref (y4),
5592 build_simple_mem_ref (y2));
5593 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5596 gimple *g
5597 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5598 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5599 gimple_seq_add_stmt (ilist, g);
5600 if (y3)
5602 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5603 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5604 gimple_seq_add_stmt (ilist, g);
5606 g = gimple_build_assign (i, PLUS_EXPR, i,
5607 build_int_cst (TREE_TYPE (i), 1));
5608 gimple_seq_add_stmt (ilist, g);
5609 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5610 gimple_seq_add_stmt (ilist, g);
5611 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5612 if (y2)
5614 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5615 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5616 gimple_seq_add_stmt (dlist, g);
5617 if (y4)
5619 g = gimple_build_assign
5620 (y4, POINTER_PLUS_EXPR, y4,
5621 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5622 gimple_seq_add_stmt (dlist, g);
5624 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5625 build_int_cst (TREE_TYPE (i2), 1));
5626 gimple_seq_add_stmt (dlist, g);
5627 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5628 gimple_seq_add_stmt (dlist, g);
5629 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5631 if (allocator)
5633 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5634 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5635 gimple_seq_add_stmt (dlist, g);
5637 continue;
5639 else if (pass == 2)
5641 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5642 if (is_global_var (out))
5643 x = var;
5644 else if (is_omp_target (ctx->stmt))
5645 x = out;
5646 else
5648 bool by_ref = use_pointer_for_field (var, ctx);
5649 x = build_receiver_ref (var, by_ref, ctx);
5651 if (!omp_privatize_by_reference (var))
5652 x = build_fold_addr_expr (x);
5653 x = fold_convert (ptr_type_node, x);
5654 unsigned cnt = task_reduction_cnt - 1;
5655 if (!task_reduction_needs_orig_p)
5656 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5657 else
5658 cnt = task_reduction_cntorig - 1;
5659 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5660 size_int (cnt), NULL_TREE, NULL_TREE);
5661 gimplify_assign (r, x, ilist);
5662 continue;
5664 else if (pass == 3)
5666 tree type = TREE_TYPE (new_var);
5667 if (!omp_privatize_by_reference (var))
5668 type = build_pointer_type (type);
5669 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5671 unsigned cnt = task_reduction_cnt - 1;
5672 if (!task_reduction_needs_orig_p)
5673 cnt += (task_reduction_cntorig_full
5674 - task_reduction_cntorig);
5675 else
5676 cnt = task_reduction_cntorig - 1;
5677 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5678 size_int (cnt), NULL_TREE, NULL_TREE);
5680 else
5682 unsigned int idx = *ctx->task_reduction_map->get (c);
5683 tree off;
5684 if (ctx->task_reductions[1 + idx])
5685 off = fold_convert (sizetype,
5686 ctx->task_reductions[1 + idx]);
5687 else
5688 off = task_reduction_read (ilist, tskred_temp, sizetype,
5689 7 + 3 * idx + 1);
5690 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5691 tskred_base, off);
5693 x = fold_convert (type, x);
5694 tree t;
5695 if (omp_privatize_by_reference (var))
5697 gimplify_assign (new_var, x, ilist);
5698 t = new_var;
5699 new_var = build_simple_mem_ref (new_var);
5701 else
5703 t = create_tmp_var (type);
5704 gimplify_assign (t, x, ilist);
5705 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5706 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5708 t = fold_convert (build_pointer_type (boolean_type_node), t);
5709 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5710 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5711 cond = create_tmp_var (TREE_TYPE (t));
5712 gimplify_assign (cond, t, ilist);
5714 else if (is_variable_sized (var))
5716 /* For variable sized types, we need to allocate the
5717 actual storage here. Call alloca and store the
5718 result in the pointer decl that we created elsewhere. */
5719 if (pass == 0)
5720 continue;
5722 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5724 tree tmp;
5726 ptr = DECL_VALUE_EXPR (new_var);
5727 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5728 ptr = TREE_OPERAND (ptr, 0);
5729 gcc_assert (DECL_P (ptr));
5730 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5732 if (lower_private_allocate (var, new_var, allocator,
5733 allocate_ptr, ilist, ctx,
5734 false, x))
5735 tmp = allocate_ptr;
5736 else
5738 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
5739 tree atmp
5740 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5741 gcall *stmt
5742 = gimple_build_call (atmp, 2, x,
5743 size_int (DECL_ALIGN (var)));
5744 cfun->calls_alloca = 1;
5745 tmp = create_tmp_var_raw (ptr_type_node);
5746 gimple_add_tmp_var (tmp);
5747 gimple_call_set_lhs (stmt, tmp);
5749 gimple_seq_add_stmt (ilist, stmt);
5752 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5753 gimplify_assign (ptr, x, ilist);
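/* Illustration (hedged; 'f' is a made-up example): privatizing a VLA,

     void f (int n)
     {
       int vla[n];
     #pragma omp parallel private (vla)
       vla[0] = n;
     }

   is lowered roughly to, inside the outlined body,

     ptr = __builtin_alloca_with_align (n * sizeof (int), DECL_ALIGN (vla));

   with uses of 'vla' going through its DECL_VALUE_EXPR '*ptr'.  */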
5756 else if (omp_privatize_by_reference (var)
5757 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5758 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5760 /* For references that are being privatized for Fortran,
5761 allocate new backing storage for the new pointer
5762 variable. This allows us to avoid changing all the
5763 code that expects a pointer to something that expects
5764 a direct variable. */
5765 if (pass == 0)
5766 continue;
5768 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5769 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5771 x = build_receiver_ref (var, false, ctx);
5772 if (ctx->allocate_map)
5773 if (tree *allocatep = ctx->allocate_map->get (var))
5775 allocator = *allocatep;
5776 if (TREE_CODE (allocator) == TREE_LIST)
5777 allocator = TREE_PURPOSE (allocator);
5778 if (TREE_CODE (allocator) != INTEGER_CST)
5779 allocator = build_outer_var_ref (allocator, ctx);
5780 allocator = fold_convert (pointer_sized_int_node,
5781 allocator);
5782 allocate_ptr = unshare_expr (x);
5784 if (allocator == NULL_TREE)
5785 x = build_fold_addr_expr_loc (clause_loc, x);
5787 else if (lower_private_allocate (var, new_var, allocator,
5788 allocate_ptr,
5789 ilist, ctx, true, x))
5790 x = allocate_ptr;
5791 else if (TREE_CONSTANT (x))
5793 /* For reduction in SIMD loop, defer adding the
5794 initialization of the reference, because if we decide
5795 to use a SIMD array for it, the initialization could cause
5796 expansion ICE. Ditto for other privatization clauses. */
5797 if (is_simd)
5798 x = NULL_TREE;
5799 else
5801 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5802 get_name (var));
5803 gimple_add_tmp_var (x);
5804 TREE_ADDRESSABLE (x) = 1;
5805 x = build_fold_addr_expr_loc (clause_loc, x);
5808 else
5810 tree atmp
5811 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5812 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5813 tree al = size_int (TYPE_ALIGN (rtype));
5814 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5817 if (x)
5819 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5820 gimplify_assign (new_var, x, ilist);
5823 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
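/* Sketch: a Fortran dummy argument is typically passed by reference, so
   its private copy is implemented as a fresh pointer aimed at new backing
   storage, conceptually

     T *new_var = <tmp, alloca, or GOMP_alloc result>;
     ... the body keeps dereferencing new_var exactly as before ...

   which is why none of the reference-expecting code needs to change.  */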
5825 else if ((c_kind == OMP_CLAUSE_REDUCTION
5826 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5827 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5829 if (pass == 0)
5830 continue;
5832 else if (pass != 0)
5833 continue;
5835 switch (OMP_CLAUSE_CODE (c))
5837 case OMP_CLAUSE_SHARED:
5838 /* Ignore shared directives in teams construct inside
5839 target construct. */
5840 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5841 && !is_host_teams_ctx (ctx))
5842 continue;
5843 /* Shared global vars are just accessed directly. */
5844 if (is_global_var (new_var))
5845 break;
5846 /* For taskloop firstprivate/lastprivate, represented
5847 as firstprivate and shared clause on the task, new_var
5848 is the firstprivate var. */
5849 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5850 break;
5851 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5852 needs to be delayed until after fixup_child_record_type so
5853 that we get the correct type during the dereference. */
5854 by_ref = use_pointer_for_field (var, ctx);
5855 x = build_receiver_ref (var, by_ref, ctx);
5856 SET_DECL_VALUE_EXPR (new_var, x);
5857 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5859 /* ??? If VAR is not passed by reference, and the variable
5860 hasn't been initialized yet, then we'll get a warning for
5861 the store into the omp_data_s structure. Ideally, we'd be
5862 able to notice this and not store anything at all, but
5863 we're generating code too early. Suppress the warning. */
5864 if (!by_ref)
5865 suppress_warning (var, OPT_Wuninitialized);
5866 break;
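/* Illustration (a sketch; ".omp_data_i" follows GCC's conventional name
   for the receiver pointer): for '#pragma omp parallel shared (x)' where
   x must travel through the data-sharing struct, the child body accesses

     .omp_data_i->x        (or *(.omp_data_i->x) when passed by pointer)

   and the DECL_VALUE_EXPR set above rewrites plain uses of 'x' into that
   form.  */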
5868 case OMP_CLAUSE__CONDTEMP_:
5869 if (is_parallel_ctx (ctx))
5871 x = build_receiver_ref (var, false, ctx);
5872 SET_DECL_VALUE_EXPR (new_var, x);
5873 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5875 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5877 x = build_zero_cst (TREE_TYPE (var));
5878 goto do_private;
5880 break;
5882 case OMP_CLAUSE_LASTPRIVATE:
5883 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5884 break;
5885 /* FALLTHRU */
5887 case OMP_CLAUSE_PRIVATE:
5888 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5889 x = build_outer_var_ref (var, ctx);
5890 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5892 if (is_task_ctx (ctx))
5893 x = build_receiver_ref (var, false, ctx);
5894 else
5895 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5897 else
5898 x = NULL;
5899 do_private:
5900 tree nx;
5901 bool copy_ctor;
5902 copy_ctor = false;
5903 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5904 ilist, ctx, false, NULL_TREE);
5905 nx = unshare_expr (new_var);
5906 if (is_simd
5907 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5908 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5909 copy_ctor = true;
5910 if (copy_ctor)
5911 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5912 else
5913 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5914 if (is_simd)
5916 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5917 if ((TREE_ADDRESSABLE (new_var) || nx || y
5918 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5919 && (gimple_omp_for_collapse (ctx->stmt) != 1
5920 || (gimple_omp_for_index (ctx->stmt, 0)
5921 != new_var)))
5922 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5923 || omp_privatize_by_reference (var))
5924 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5925 ivar, lvar))
5927 if (omp_privatize_by_reference (var))
5929 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5930 tree new_vard = TREE_OPERAND (new_var, 0);
5931 gcc_assert (DECL_P (new_vard));
5932 SET_DECL_VALUE_EXPR (new_vard,
5933 build_fold_addr_expr (lvar));
5934 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5937 if (nx)
5939 tree iv = unshare_expr (ivar);
5940 if (copy_ctor)
5941 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5943 else
5944 x = lang_hooks.decls.omp_clause_default_ctor (c, iv, x);
5948 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5950 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5951 unshare_expr (ivar), x);
5952 nx = x;
5954 if (nx && x)
5955 gimplify_and_add (x, &llist[0]);
5956 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5957 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5959 tree v = new_var;
5960 if (!DECL_P (v))
5962 gcc_assert (TREE_CODE (v) == MEM_REF);
5963 v = TREE_OPERAND (v, 0);
5964 gcc_assert (DECL_P (v));
5966 v = *ctx->lastprivate_conditional_map->get (v);
5967 tree t = create_tmp_var (TREE_TYPE (v));
5968 tree z = build_zero_cst (TREE_TYPE (v));
5969 tree orig_v
5970 = build_outer_var_ref (var, ctx,
5971 OMP_CLAUSE_LASTPRIVATE);
5972 gimple_seq_add_stmt (dlist,
5973 gimple_build_assign (t, z));
5974 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5975 tree civar = DECL_VALUE_EXPR (v);
5976 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5977 civar = unshare_expr (civar);
5978 TREE_OPERAND (civar, 1) = sctx.idx;
5979 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5980 unshare_expr (civar));
5981 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5982 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5983 orig_v, unshare_expr (ivar)));
5984 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5985 civar);
5986 x = build3 (COND_EXPR, void_type_node, cond, x,
5987 void_node);
5988 gimple_seq tseq = NULL;
5989 gimplify_and_add (x, &tseq);
5990 if (ctx->outer)
5991 lower_omp (&tseq, ctx->outer);
5992 gimple_seq_add_seq (&llist[1], tseq);
5994 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5995 && ctx->for_simd_scan_phase)
5997 x = unshare_expr (ivar);
5998 tree orig_v
5999 = build_outer_var_ref (var, ctx,
6000 OMP_CLAUSE_LASTPRIVATE);
6001 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6002 orig_v);
6003 gimplify_and_add (x, &llist[0]);
6005 if (y)
6007 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6008 if (y)
6009 gimplify_and_add (y, &llist[1]);
6011 break;
6013 if (omp_privatize_by_reference (var))
6015 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6016 tree new_vard = TREE_OPERAND (new_var, 0);
6017 gcc_assert (DECL_P (new_vard));
6018 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6019 x = TYPE_SIZE_UNIT (type);
6020 if (TREE_CONSTANT (x))
6022 x = create_tmp_var_raw (type, get_name (var));
6023 gimple_add_tmp_var (x);
6024 TREE_ADDRESSABLE (x) = 1;
6025 x = build_fold_addr_expr_loc (clause_loc, x);
6026 x = fold_convert_loc (clause_loc,
6027 TREE_TYPE (new_vard), x);
6028 gimplify_assign (new_vard, x, ilist);
6032 if (nx)
6033 gimplify_and_add (nx, ilist);
6034 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6035 && is_simd
6036 && ctx->for_simd_scan_phase)
6038 tree orig_v = build_outer_var_ref (var, ctx,
6039 OMP_CLAUSE_LASTPRIVATE);
6040 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6041 orig_v);
6042 gimplify_and_add (x, ilist);
6044 /* FALLTHRU */
6046 do_dtor:
6047 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6048 if (x)
6049 gimplify_and_add (x, dlist);
6050 if (allocator)
6052 if (!is_gimple_val (allocator))
6054 tree avar = create_tmp_var (TREE_TYPE (allocator));
6055 gimplify_assign (avar, allocator, dlist);
6056 allocator = avar;
6058 if (!is_gimple_val (allocate_ptr))
6060 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6061 gimplify_assign (apvar, allocate_ptr, dlist);
6062 allocate_ptr = apvar;
6064 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6065 gimple *g
6066 = gimple_build_call (f, 2, allocate_ptr, allocator);
6067 gimple_seq_add_stmt (dlist, g);
6069 break;
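/* Sketch of the destructor-side sequence built just above for, e.g.,
   '#pragma omp parallel private (x) allocate (alloc: x)':

     <x's omp_clause_dtor, if any>;
     GOMP_free (allocate_ptr, allocator);

   mirroring the allocation done earlier by lower_private_allocate.  */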
6071 case OMP_CLAUSE_LINEAR:
6072 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6073 goto do_firstprivate;
6074 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6075 x = NULL;
6076 else
6077 x = build_outer_var_ref (var, ctx);
6078 goto do_private;
6080 case OMP_CLAUSE_FIRSTPRIVATE:
6081 if (is_task_ctx (ctx))
6083 if ((omp_privatize_by_reference (var)
6084 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6085 || is_variable_sized (var))
6086 goto do_dtor;
6087 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6088 ctx))
6089 || use_pointer_for_field (var, NULL))
6091 x = build_receiver_ref (var, false, ctx);
6092 if (ctx->allocate_map)
6093 if (tree *allocatep = ctx->allocate_map->get (var))
6095 allocator = *allocatep;
6096 if (TREE_CODE (allocator) == TREE_LIST)
6097 allocator = TREE_PURPOSE (allocator);
6098 if (TREE_CODE (allocator) != INTEGER_CST)
6099 allocator = build_outer_var_ref (allocator, ctx);
6100 allocator = fold_convert (pointer_sized_int_node,
6101 allocator);
6102 allocate_ptr = unshare_expr (x);
6103 x = build_simple_mem_ref (x);
6104 TREE_THIS_NOTRAP (x) = 1;
6106 SET_DECL_VALUE_EXPR (new_var, x);
6107 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6108 goto do_dtor;
6111 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6112 && omp_privatize_by_reference (var))
6114 x = build_outer_var_ref (var, ctx);
6115 gcc_assert (TREE_CODE (x) == MEM_REF
6116 && integer_zerop (TREE_OPERAND (x, 1)));
6117 x = TREE_OPERAND (x, 0);
6118 x = lang_hooks.decls.omp_clause_copy_ctor
6119 (c, unshare_expr (new_var), x);
6120 gimplify_and_add (x, ilist);
6121 goto do_dtor;
6123 do_firstprivate:
6124 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6125 ilist, ctx, false, NULL_TREE);
6126 x = build_outer_var_ref (var, ctx);
6127 if (is_simd)
6129 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6130 && gimple_omp_for_combined_into_p (ctx->stmt))
6132 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6133 if (DECL_P (t))
6134 t = build_outer_var_ref (t, ctx);
6135 tree stept = TREE_TYPE (t);
6136 tree ct = omp_find_clause (clauses,
6137 OMP_CLAUSE__LOOPTEMP_);
6138 gcc_assert (ct);
6139 tree l = OMP_CLAUSE_DECL (ct);
6140 tree n1 = fd->loop.n1;
6141 tree step = fd->loop.step;
6142 tree itype = TREE_TYPE (l);
6143 if (POINTER_TYPE_P (itype))
6144 itype = signed_type_for (itype);
6145 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6146 if (TYPE_UNSIGNED (itype)
6147 && fd->loop.cond_code == GT_EXPR)
6148 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6149 fold_build1 (NEGATE_EXPR, itype, l),
6150 fold_build1 (NEGATE_EXPR,
6151 itype, step));
6152 else
6153 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6154 t = fold_build2 (MULT_EXPR, stept,
6155 fold_convert (stept, l), t);
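/* Sketch of what the computation above does: for a combined construct
   (e.g. 'for simd' with linear (v: s)), the inner body starts at the
   iteration recorded in the _looptemp_ L, so

     l = (L - n1) / step

   is the number of logical iterations already consumed, and

     t = l * s

   is how far the privatized linear variable must be advanced before the
   inner construct runs.  */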
6157 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6159 if (omp_privatize_by_reference (var))
6161 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6162 tree new_vard = TREE_OPERAND (new_var, 0);
6163 gcc_assert (DECL_P (new_vard));
6164 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6165 nx = TYPE_SIZE_UNIT (type);
6166 if (TREE_CONSTANT (nx))
6168 nx = create_tmp_var_raw (type,
6169 get_name (var));
6170 gimple_add_tmp_var (nx);
6171 TREE_ADDRESSABLE (nx) = 1;
6172 nx = build_fold_addr_expr_loc (clause_loc,
6173 nx);
6174 nx = fold_convert_loc (clause_loc,
6175 TREE_TYPE (new_vard),
6176 nx);
6177 gimplify_assign (new_vard, nx, ilist);
6181 x = lang_hooks.decls.omp_clause_linear_ctor
6182 (c, new_var, x, t);
6183 gimplify_and_add (x, ilist);
6184 goto do_dtor;
6187 if (POINTER_TYPE_P (TREE_TYPE (x)))
6188 x = fold_build_pointer_plus (x, t);
6189 else
6190 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6191 fold_convert (TREE_TYPE (x), t));
6194 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6195 || TREE_ADDRESSABLE (new_var)
6196 || omp_privatize_by_reference (var))
6197 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6198 ivar, lvar))
6200 if (omp_privatize_by_reference (var))
6202 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6203 tree new_vard = TREE_OPERAND (new_var, 0);
6204 gcc_assert (DECL_P (new_vard));
6205 SET_DECL_VALUE_EXPR (new_vard,
6206 build_fold_addr_expr (lvar));
6207 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6209 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6211 tree iv = create_tmp_var (TREE_TYPE (new_var));
6212 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6213 gimplify_and_add (x, ilist);
6214 gimple_stmt_iterator gsi
6215 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6216 gassign *g
6217 = gimple_build_assign (unshare_expr (lvar), iv);
6218 gsi_insert_before_without_update (&gsi, g,
6219 GSI_SAME_STMT);
6220 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6221 enum tree_code code = PLUS_EXPR;
6222 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6223 code = POINTER_PLUS_EXPR;
6224 g = gimple_build_assign (iv, code, iv, t);
6225 gsi_insert_before_without_update (&gsi, g,
6226 GSI_SAME_STMT);
6227 break;
6229 x = lang_hooks.decls.omp_clause_copy_ctor
6230 (c, unshare_expr (ivar), x);
6231 gimplify_and_add (x, &llist[0]);
6232 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6233 if (x)
6234 gimplify_and_add (x, &llist[1]);
6235 break;
6237 if (omp_privatize_by_reference (var))
6239 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6240 tree new_vard = TREE_OPERAND (new_var, 0);
6241 gcc_assert (DECL_P (new_vard));
6242 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6243 nx = TYPE_SIZE_UNIT (type);
6244 if (TREE_CONSTANT (nx))
6246 nx = create_tmp_var_raw (type, get_name (var));
6247 gimple_add_tmp_var (nx);
6248 TREE_ADDRESSABLE (nx) = 1;
6249 nx = build_fold_addr_expr_loc (clause_loc, nx);
6250 nx = fold_convert_loc (clause_loc,
6251 TREE_TYPE (new_vard), nx);
6252 gimplify_assign (new_vard, nx, ilist);
6256 x = lang_hooks.decls.omp_clause_copy_ctor
6257 (c, unshare_expr (new_var), x);
6258 gimplify_and_add (x, ilist);
6259 goto do_dtor;
6261 case OMP_CLAUSE__LOOPTEMP_:
6262 case OMP_CLAUSE__REDUCTEMP_:
6263 gcc_assert (is_taskreg_ctx (ctx));
6264 x = build_outer_var_ref (var, ctx);
6265 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6266 gimplify_and_add (x, ilist);
6267 break;
6269 case OMP_CLAUSE_COPYIN:
6270 by_ref = use_pointer_for_field (var, NULL);
6271 x = build_receiver_ref (var, by_ref, ctx);
6272 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6273 append_to_statement_list (x, &copyin_seq);
6274 copyin_by_ref |= by_ref;
6275 break;
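/* Illustration: for a threadprivate variable t,

     #pragma omp parallel copyin (t)

   appends 't = <master copy of t>' (via omp_clause_assign_op) to
   copyin_seq; that sequence is guarded further below so that thread 0
   does not copy onto itself.  */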
6277 case OMP_CLAUSE_REDUCTION:
6278 case OMP_CLAUSE_IN_REDUCTION:
6279 /* OpenACC reductions are initialized using the
6280 GOACC_REDUCTION internal function. */
6281 if (is_gimple_omp_oacc (ctx->stmt))
6282 break;
6283 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6285 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6286 gimple *tseq;
6287 tree ptype = TREE_TYPE (placeholder);
6288 if (cond)
6290 x = error_mark_node;
6291 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6292 && !task_reduction_needs_orig_p)
6293 x = var;
6294 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6296 tree pptype = build_pointer_type (ptype);
6297 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6298 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6299 size_int (task_reduction_cnt_full
6300 + task_reduction_cntorig - 1),
6301 NULL_TREE, NULL_TREE);
6302 else
6304 unsigned int idx
6305 = *ctx->task_reduction_map->get (c);
6306 x = task_reduction_read (ilist, tskred_temp,
6307 pptype, 7 + 3 * idx);
6309 x = fold_convert (pptype, x);
6310 x = build_simple_mem_ref (x);
6313 else
6315 lower_private_allocate (var, new_var, allocator,
6316 allocate_ptr, ilist, ctx, false,
6317 NULL_TREE);
6318 x = build_outer_var_ref (var, ctx);
6320 if (omp_privatize_by_reference (var)
6321 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6322 x = build_fold_addr_expr_loc (clause_loc, x);
6324 SET_DECL_VALUE_EXPR (placeholder, x);
6325 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6326 tree new_vard = new_var;
6327 if (omp_privatize_by_reference (var))
6329 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6330 new_vard = TREE_OPERAND (new_var, 0);
6331 gcc_assert (DECL_P (new_vard));
6333 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6334 if (is_simd
6335 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6336 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6337 rvarp = &rvar;
6338 if (is_simd
6339 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6340 ivar, lvar, rvarp,
6341 &rvar2))
6343 if (new_vard == new_var)
6345 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6346 SET_DECL_VALUE_EXPR (new_var, ivar);
6348 else
6350 SET_DECL_VALUE_EXPR (new_vard,
6351 build_fold_addr_expr (ivar));
6352 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6354 x = lang_hooks.decls.omp_clause_default_ctor
6355 (c, unshare_expr (ivar),
6356 build_outer_var_ref (var, ctx));
6357 if (rvarp && ctx->for_simd_scan_phase)
6359 if (x)
6360 gimplify_and_add (x, &llist[0]);
6361 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6362 if (x)
6363 gimplify_and_add (x, &llist[1]);
6364 break;
6366 else if (rvarp)
6368 if (x)
6370 gimplify_and_add (x, &llist[0]);
6372 tree ivar2 = unshare_expr (lvar);
6373 TREE_OPERAND (ivar2, 1) = sctx.idx;
6374 x = lang_hooks.decls.omp_clause_default_ctor
6375 (c, ivar2, build_outer_var_ref (var, ctx));
6376 gimplify_and_add (x, &llist[0]);
6378 if (rvar2)
6380 x = lang_hooks.decls.omp_clause_default_ctor
6381 (c, unshare_expr (rvar2),
6382 build_outer_var_ref (var, ctx));
6383 gimplify_and_add (x, &llist[0]);
6386 /* For types that need construction, add another
6387 private var which will be default constructed
6388 and optionally initialized with
6389 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
6390 loop we then want to assign this value instead
6391 of constructing and destructing it in each
6392 iteration. */
6393 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6394 gimple_add_tmp_var (nv);
6395 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6396 ? rvar2
6397 : ivar, 0),
6398 nv);
6399 x = lang_hooks.decls.omp_clause_default_ctor
6400 (c, nv, build_outer_var_ref (var, ctx));
6401 gimplify_and_add (x, ilist);
6403 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6405 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6406 x = DECL_VALUE_EXPR (new_vard);
6407 tree vexpr = nv;
6408 if (new_vard != new_var)
6409 vexpr = build_fold_addr_expr (nv);
6410 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6411 lower_omp (&tseq, ctx);
6412 SET_DECL_VALUE_EXPR (new_vard, x);
6413 gimple_seq_add_seq (ilist, tseq);
6414 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6417 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6418 if (x)
6419 gimplify_and_add (x, dlist);
6422 tree ref = build_outer_var_ref (var, ctx);
6423 x = unshare_expr (ivar);
6424 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6425 ref);
6426 gimplify_and_add (x, &llist[0]);
6428 ref = build_outer_var_ref (var, ctx);
6429 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6430 rvar);
6431 gimplify_and_add (x, &llist[3]);
6433 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6434 if (new_vard == new_var)
6435 SET_DECL_VALUE_EXPR (new_var, lvar);
6436 else
6437 SET_DECL_VALUE_EXPR (new_vard,
6438 build_fold_addr_expr (lvar));
6440 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6441 if (x)
6442 gimplify_and_add (x, &llist[1]);
6444 tree ivar2 = unshare_expr (lvar);
6445 TREE_OPERAND (ivar2, 1) = sctx.idx;
6446 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6447 if (x)
6448 gimplify_and_add (x, &llist[1]);
6450 if (rvar2)
6452 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6453 if (x)
6454 gimplify_and_add (x, &llist[1]);
6456 break;
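/* Sketch of the source form served by the inscan path above:

     #pragma omp simd reduction (inscan, +: s)
     for (int i = 0; i < n; i++)
       {
	 s += a[i];
     #pragma omp scan inclusive (s)
	 b[i] = s;
       }

   The extra default-constructed temporary (nv) carries the identity value
   so the per-iteration copies avoid repeated construction and
   destruction.  */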
6458 if (x)
6459 gimplify_and_add (x, &llist[0]);
6460 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6462 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6463 lower_omp (&tseq, ctx);
6464 gimple_seq_add_seq (&llist[0], tseq);
6466 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6467 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6468 lower_omp (&tseq, ctx);
6469 gimple_seq_add_seq (&llist[1], tseq);
6470 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6471 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6472 if (new_vard == new_var)
6473 SET_DECL_VALUE_EXPR (new_var, lvar);
6474 else
6475 SET_DECL_VALUE_EXPR (new_vard,
6476 build_fold_addr_expr (lvar));
6477 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6478 if (x)
6479 gimplify_and_add (x, &llist[1]);
6480 break;
6482 /* If this is a reference to a constant-size reduction var
6483 with placeholder, we haven't emitted the initializer
6484 for it because it is undesirable if SIMD arrays are used.
6485 But if they aren't used, we need to emit the deferred
6486 initialization now. */
6487 else if (omp_privatize_by_reference (var) && is_simd)
6488 handle_simd_reference (clause_loc, new_vard, ilist);
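/* That is, once we know no SIMD array replaced the variable, emit the
   deferred "allocate backing storage and point the reference at it"
   sequence via handle_simd_reference.  */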
6490 tree lab2 = NULL_TREE;
6491 if (cond)
6493 gimple *g;
6494 if (!is_parallel_ctx (ctx))
6496 tree condv = create_tmp_var (boolean_type_node);
6497 tree m = build_simple_mem_ref (cond);
6498 g = gimple_build_assign (condv, m);
6499 gimple_seq_add_stmt (ilist, g);
6500 tree lab1
6501 = create_artificial_label (UNKNOWN_LOCATION);
6502 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6503 g = gimple_build_cond (NE_EXPR, condv,
6504 boolean_false_node,
6505 lab2, lab1);
6506 gimple_seq_add_stmt (ilist, g);
6507 gimple_seq_add_stmt (ilist,
6508 gimple_build_label (lab1));
6510 g = gimple_build_assign (build_simple_mem_ref (cond),
6511 boolean_true_node);
6512 gimple_seq_add_stmt (ilist, g);
6514 x = lang_hooks.decls.omp_clause_default_ctor
6515 (c, unshare_expr (new_var),
6516 cond ? NULL_TREE
6517 : build_outer_var_ref (var, ctx));
6518 if (x)
6519 gimplify_and_add (x, ilist);
6521 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6522 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6524 if (ctx->for_simd_scan_phase)
6525 goto do_dtor;
6526 if (x || (!is_simd
6527 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6529 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6530 gimple_add_tmp_var (nv);
6531 ctx->cb.decl_map->put (new_vard, nv);
6532 x = lang_hooks.decls.omp_clause_default_ctor
6533 (c, nv, build_outer_var_ref (var, ctx));
6534 if (x)
6535 gimplify_and_add (x, ilist);
6536 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6538 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6539 tree vexpr = nv;
6540 if (new_vard != new_var)
6541 vexpr = build_fold_addr_expr (nv);
6542 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6543 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6544 lower_omp (&tseq, ctx);
6545 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6546 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6547 gimple_seq_add_seq (ilist, tseq);
6549 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6550 if (is_simd && ctx->scan_exclusive)
6552 tree nv2
6553 = create_tmp_var_raw (TREE_TYPE (new_var));
6554 gimple_add_tmp_var (nv2);
6555 ctx->cb.decl_map->put (nv, nv2);
6556 x = lang_hooks.decls.omp_clause_default_ctor
6557 (c, nv2, build_outer_var_ref (var, ctx));
6558 gimplify_and_add (x, ilist);
6559 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6560 if (x)
6561 gimplify_and_add (x, dlist);
6563 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6564 if (x)
6565 gimplify_and_add (x, dlist);
6567 else if (is_simd
6568 && ctx->scan_exclusive
6569 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6571 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6572 gimple_add_tmp_var (nv2);
6573 ctx->cb.decl_map->put (new_vard, nv2);
6574 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6575 if (x)
6576 gimplify_and_add (x, dlist);
6578 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6579 goto do_dtor;
6582 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6584 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6585 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6586 && is_omp_target (ctx->stmt))
6588 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6589 tree oldv = NULL_TREE;
6590 gcc_assert (d);
6591 if (DECL_HAS_VALUE_EXPR_P (d))
6592 oldv = DECL_VALUE_EXPR (d);
6593 SET_DECL_VALUE_EXPR (d, new_vard);
6594 DECL_HAS_VALUE_EXPR_P (d) = 1;
6595 lower_omp (&tseq, ctx);
6596 if (oldv)
6597 SET_DECL_VALUE_EXPR (d, oldv);
6598 else
6600 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6601 DECL_HAS_VALUE_EXPR_P (d) = 0;
6604 else
6605 lower_omp (&tseq, ctx);
6606 gimple_seq_add_seq (ilist, tseq);
6608 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6609 if (is_simd)
6611 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6612 lower_omp (&tseq, ctx);
6613 gimple_seq_add_seq (dlist, tseq);
6614 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6616 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6617 if (cond)
6619 if (lab2)
6620 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6621 break;
6623 goto do_dtor;
6625 else
6627 x = omp_reduction_init (c, TREE_TYPE (new_var));
6628 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6629 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6631 if (cond)
6633 gimple *g;
6634 tree lab2 = NULL_TREE;
6635 /* GOMP_taskgroup_reduction_register memsets the whole
6636 array to zero. If the initializer is zero, we don't
6637 need to initialize it again, just mark it as ever
6638 used unconditionally, i.e. cond = true. */
6639 if (initializer_zerop (x))
6641 g = gimple_build_assign (build_simple_mem_ref (cond),
6642 boolean_true_node);
6643 gimple_seq_add_stmt (ilist, g);
6644 break;
6647 /* Otherwise, emit
6648 if (!cond) { cond = true; new_var = x; } */
6649 if (!is_parallel_ctx (ctx))
6651 tree condv = create_tmp_var (boolean_type_node);
6652 tree m = build_simple_mem_ref (cond);
6653 g = gimple_build_assign (condv, m);
6654 gimple_seq_add_stmt (ilist, g);
6655 tree lab1
6656 = create_artificial_label (UNKNOWN_LOCATION);
6657 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6658 g = gimple_build_cond (NE_EXPR, condv,
6659 boolean_false_node,
6660 lab2, lab1);
6661 gimple_seq_add_stmt (ilist, g);
6662 gimple_seq_add_stmt (ilist,
6663 gimple_build_label (lab1));
6665 g = gimple_build_assign (build_simple_mem_ref (cond),
6666 boolean_true_node);
6667 gimple_seq_add_stmt (ilist, g);
6668 gimplify_assign (new_var, x, ilist);
6669 if (lab2)
6670 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6671 break;
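/* E.g. for reduction (*: p), whose identity value is 1, the guard built
   above is (sketch):

     if (!*cond)
       {
	 *cond = true;
	 new_var = 1;
       }
  */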
6674 /* reduction(-:var) sums up the partial results, so it
6675 acts identically to reduction(+:var). */
6676 if (code == MINUS_EXPR)
6677 code = PLUS_EXPR;
6679 bool is_truth_op
6680 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6681 tree new_vard = new_var;
6682 if (is_simd && omp_privatize_by_reference (var))
6684 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6685 new_vard = TREE_OPERAND (new_var, 0);
6686 gcc_assert (DECL_P (new_vard));
6688 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6689 if (is_simd
6690 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6691 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6692 rvarp = &rvar;
6693 if (is_simd
6694 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6695 ivar, lvar, rvarp,
6696 &rvar2))
6698 if (new_vard != new_var)
6700 SET_DECL_VALUE_EXPR (new_vard,
6701 build_fold_addr_expr (lvar));
6702 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6705 tree ref = build_outer_var_ref (var, ctx);
6707 if (rvarp)
6709 if (ctx->for_simd_scan_phase)
6710 break;
6711 gimplify_assign (ivar, ref, &llist[0]);
6712 ref = build_outer_var_ref (var, ctx);
6713 gimplify_assign (ref, rvar, &llist[3]);
6714 break;
6717 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6719 if (sctx.is_simt)
6721 if (!simt_lane)
6722 simt_lane = create_tmp_var (unsigned_type_node);
6723 x = build_call_expr_internal_loc
6724 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6725 TREE_TYPE (ivar), 2, ivar, simt_lane);
6726 /* Make sure x is evaluated unconditionally. */
6727 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6728 gimplify_assign (bfly_var, x, &llist[2]);
6729 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6730 gimplify_assign (ivar, x, &llist[2]);
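/* Sketch: IFN_GOMP_SIMT_XCHG_BFLY reads the partial from the lane whose
   id differs in the bit selected by simt_lane, i.e.

     bfly_var = <ivar of lane (my_lane ^ simt_lane)>;
     ivar = ivar OP bfly_var;

   iterated for simt_lane = 1, 2, 4, ... by the loop emitted below, which
   folds all lanes' partials together.  */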
6732 tree ivar2 = ivar;
6733 tree ref2 = ref;
6734 if (is_truth_op)
6736 tree zero = build_zero_cst (TREE_TYPE (ivar));
6737 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6738 boolean_type_node, ivar,
6739 zero);
6740 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6741 boolean_type_node, ref,
6742 zero);
6744 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6745 if (is_truth_op)
6746 x = fold_convert (TREE_TYPE (ref), x);
6747 ref = build_outer_var_ref (var, ctx);
6748 gimplify_assign (ref, x, &llist[1]);
6751 else
6753 lower_private_allocate (var, new_var, allocator,
6754 allocate_ptr, ilist, ctx,
6755 false, NULL_TREE);
6756 if (omp_privatize_by_reference (var) && is_simd)
6757 handle_simd_reference (clause_loc, new_vard, ilist);
6758 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6759 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6760 break;
6761 gimplify_assign (new_var, x, ilist);
6762 if (is_simd)
6764 tree ref = build_outer_var_ref (var, ctx);
6765 tree new_var2 = new_var;
6766 tree ref2 = ref;
6767 if (is_truth_op)
6769 tree zero = build_zero_cst (TREE_TYPE (new_var));
6770 new_var2
6771 = fold_build2_loc (clause_loc, NE_EXPR,
6772 boolean_type_node, new_var,
6773 zero);
6774 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6775 boolean_type_node, ref,
6776 zero);
6778 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6779 if (is_truth_op)
6780 x = fold_convert (TREE_TYPE (new_var), x);
6781 ref = build_outer_var_ref (var, ctx);
6782 gimplify_assign (ref, x, dlist);
6784 if (allocator)
6785 goto do_dtor;
6788 break;
6790 default:
6791 gcc_unreachable ();
6795 if (tskred_avar)
6797 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6798 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6801 if (known_eq (sctx.max_vf, 1U))
6803 sctx.is_simt = false;
6804 if (ctx->lastprivate_conditional_map)
6806 if (gimple_omp_for_combined_into_p (ctx->stmt))
6808 /* Signal to lower_omp_1 that it should use parent context. */
6809 ctx->combined_into_simd_safelen1 = true;
6810 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6811 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6812 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6814 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6815 omp_context *outer = ctx->outer;
6816 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6817 outer = outer->outer;
6818 tree *v = ctx->lastprivate_conditional_map->get (o);
6819 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6820 tree *pv = outer->lastprivate_conditional_map->get (po);
6821 *v = *pv;
6824 else
6826 /* When not vectorized, treat lastprivate(conditional:) like
6827 normal lastprivate, as there will be just one simd lane
6828 writing the privatized variable. */
6829 delete ctx->lastprivate_conditional_map;
6830 ctx->lastprivate_conditional_map = NULL;
6835 if (nonconst_simd_if)
6837 if (sctx.lane == NULL_TREE)
6839 sctx.idx = create_tmp_var (unsigned_type_node);
6840 sctx.lane = create_tmp_var (unsigned_type_node);
6842 /* FIXME: For now. */
6843 sctx.is_simt = false;
6846 if (sctx.lane || sctx.is_simt)
6848 uid = create_tmp_var (ptr_type_node, "simduid");
6849 /* Don't want uninit warnings on simduid; it is always uninitialized,
6850 but we use it only for its DECL_UID, not for its value. */
6851 suppress_warning (uid, OPT_Wuninitialized);
6852 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6853 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6854 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6855 gimple_omp_for_set_clauses (ctx->stmt, c);
6857 /* Emit calls denoting privatized variables and initializing a pointer to
6858 structure that holds private variables as fields after ompdevlow pass. */
6859 if (sctx.is_simt)
6861 sctx.simt_eargs[0] = uid;
6862 gimple *g
6863 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6864 gimple_call_set_lhs (g, uid);
6865 gimple_seq_add_stmt (ilist, g);
6866 sctx.simt_eargs.release ();
6868 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6869 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6870 gimple_call_set_lhs (g, simtrec);
6871 gimple_seq_add_stmt (ilist, g);
6873 if (sctx.lane)
6875 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6876 2 + (nonconst_simd_if != NULL),
6877 uid, integer_zero_node,
6878 nonconst_simd_if);
6879 gimple_call_set_lhs (g, sctx.lane);
6880 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6881 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6882 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6883 build_int_cst (unsigned_type_node, 0));
6884 gimple_seq_add_stmt (ilist, g);
6885 if (sctx.lastlane)
6887 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6888 2, uid, sctx.lane);
6889 gimple_call_set_lhs (g, sctx.lastlane);
6890 gimple_seq_add_stmt (dlist, g);
6891 gimple_seq_add_seq (dlist, llist[3]);
6893 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6894 if (llist[2])
6896 tree simt_vf = create_tmp_var (unsigned_type_node);
6897 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6898 gimple_call_set_lhs (g, simt_vf);
6899 gimple_seq_add_stmt (dlist, g);
6901 tree t = build_int_cst (unsigned_type_node, 1);
6902 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6903 gimple_seq_add_stmt (dlist, g);
6905 t = build_int_cst (unsigned_type_node, 0);
6906 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6907 gimple_seq_add_stmt (dlist, g);
6909 tree body = create_artificial_label (UNKNOWN_LOCATION);
6910 tree header = create_artificial_label (UNKNOWN_LOCATION);
6911 tree end = create_artificial_label (UNKNOWN_LOCATION);
6912 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6913 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6915 gimple_seq_add_seq (dlist, llist[2]);
6917 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6918 gimple_seq_add_stmt (dlist, g);
6920 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6921 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6922 gimple_seq_add_stmt (dlist, g);
6924 gimple_seq_add_stmt (dlist, gimple_build_label (end));
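/* The loop just emitted is, in C terms (a sketch):

     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       <llist[2]: butterfly exchange and combine>;
  */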
6926 for (int i = 0; i < 2; i++)
6927 if (llist[i])
6929 tree vf = create_tmp_var (unsigned_type_node);
6930 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6931 gimple_call_set_lhs (g, vf);
6932 gimple_seq *seq = i == 0 ? ilist : dlist;
6933 gimple_seq_add_stmt (seq, g);
6934 tree t = build_int_cst (unsigned_type_node, 0);
6935 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6936 gimple_seq_add_stmt (seq, g);
6937 tree body = create_artificial_label (UNKNOWN_LOCATION);
6938 tree header = create_artificial_label (UNKNOWN_LOCATION);
6939 tree end = create_artificial_label (UNKNOWN_LOCATION);
6940 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6941 gimple_seq_add_stmt (seq, gimple_build_label (body));
6942 gimple_seq_add_seq (seq, llist[i]);
6943 t = build_int_cst (unsigned_type_node, 1);
6944 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6945 gimple_seq_add_stmt (seq, g);
6946 gimple_seq_add_stmt (seq, gimple_build_label (header));
6947 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6948 gimple_seq_add_stmt (seq, g);
6949 gimple_seq_add_stmt (seq, gimple_build_label (end));
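/* In C terms, each of the loops generated just above is (sketch):

     for (sctx.idx = 0; sctx.idx < vf; sctx.idx++)
       <llist[i]>;

   with i == 0 running per-lane initialization in ilist, i == 1 running
   the per-lane merge/destructor code in dlist, and vf the runtime
   IFN_GOMP_SIMD_VF value for this simduid.  */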
6952 if (sctx.is_simt)
6954 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6955 gimple *g
6956 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6957 gimple_seq_add_stmt (dlist, g);
6960 /* The copyin sequence is not to be executed by the main thread, since
6961 that would result in self-copies. Perhaps not visible to scalars,
6962 but it certainly is to C++ operator=. */
6963 if (copyin_seq)
6965 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM), 0);
6967 x = build2 (NE_EXPR, boolean_type_node, x,
6968 build_int_cst (TREE_TYPE (x), 0));
6969 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6970 gimplify_and_add (x, ilist);
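/* I.e. (sketch):

     if (omp_get_thread_num () != 0)
       <copyin_seq>;

   so the thread that owns the original values skips the self-copy.  */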
6973 /* If any copyin variable is passed by reference, we must ensure the
6974 master thread doesn't modify it before it is copied over in all
6975 threads. Similarly for variables in both firstprivate and
6976 lastprivate clauses we need to ensure the lastprivate copying
6977 happens after firstprivate copying in all threads. And similarly
6978 for UDRs if initializer expression refers to omp_orig. */
6979 if (copyin_by_ref || lastprivate_firstprivate
6980 || (reduction_omp_orig_ref
6981 && !ctx->scan_inclusive
6982 && !ctx->scan_exclusive))
6984 /* Don't add any barrier for #pragma omp simd or
6985 #pragma omp distribute. */
6986 if (!is_task_ctx (ctx)
6987 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6988 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6989 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6992 /* If max_vf is non-zero, then we can use only a vectorization factor
6993 up to the max_vf we chose. So stick it into the safelen clause. */
6994 if (maybe_ne (sctx.max_vf, 0U))
6996 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6997 OMP_CLAUSE_SAFELEN);
6998 poly_uint64 safe_len;
6999 if (c == NULL_TREE
7000 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7001 && maybe_gt (safe_len, sctx.max_vf)))
7003 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7004 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7005 sctx.max_vf);
7006 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7007 gimple_omp_for_set_clauses (ctx->stmt, c);
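/* Sketch: if the pass fixed max_vf at 8, the simd loop ends up with an
   added or tightened 'safelen(8)' clause, keeping the vectorizer within
   the bounds of the privatization arrays that were sized by max_vf.  */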
7012 /* Create temporary variables for lastprivate(conditional:) implementation
7013 in context CTX with CLAUSES. */
7015 static void
7016 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
7018 tree iter_type = NULL_TREE;
7019 tree cond_ptr = NULL_TREE;
7020 tree iter_var = NULL_TREE;
7021 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7022 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
7023 tree next = *clauses;
7024 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
7025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7026 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7028 if (is_simd)
7030 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7031 gcc_assert (cc);
7032 if (iter_type == NULL_TREE)
7034 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7035 iter_var = create_tmp_var_raw (iter_type);
7036 DECL_CONTEXT (iter_var) = current_function_decl;
7037 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7038 DECL_CHAIN (iter_var) = ctx->block_vars;
7039 ctx->block_vars = iter_var;
7040 tree c3
7041 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7042 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7043 OMP_CLAUSE_DECL (c3) = iter_var;
7044 OMP_CLAUSE_CHAIN (c3) = *clauses;
7045 *clauses = c3;
7046 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7048 next = OMP_CLAUSE_CHAIN (cc);
7049 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7050 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7051 ctx->lastprivate_conditional_map->put (o, v);
7052 continue;
7054 if (iter_type == NULL)
7056 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7058 struct omp_for_data fd;
7059 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7060 NULL);
7061 iter_type = unsigned_type_for (fd.iter_type);
7063 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7064 iter_type = unsigned_type_node;
7065 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7066 if (c2)
7068 cond_ptr
7069 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7070 OMP_CLAUSE_DECL (c2) = cond_ptr;
7072 else
7074 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7075 DECL_CONTEXT (cond_ptr) = current_function_decl;
7076 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7077 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7078 ctx->block_vars = cond_ptr;
7079 c2 = build_omp_clause (UNKNOWN_LOCATION,
7080 OMP_CLAUSE__CONDTEMP_);
7081 OMP_CLAUSE_DECL (c2) = cond_ptr;
7082 OMP_CLAUSE_CHAIN (c2) = *clauses;
7083 *clauses = c2;
7085 iter_var = create_tmp_var_raw (iter_type);
7086 DECL_CONTEXT (iter_var) = current_function_decl;
7087 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7088 DECL_CHAIN (iter_var) = ctx->block_vars;
7089 ctx->block_vars = iter_var;
7090 tree c3
7091 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7092 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7093 OMP_CLAUSE_DECL (c3) = iter_var;
7094 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7095 OMP_CLAUSE_CHAIN (c2) = c3;
7096 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7098 tree v = create_tmp_var_raw (iter_type);
7099 DECL_CONTEXT (v) = current_function_decl;
7100 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7101 DECL_CHAIN (v) = ctx->block_vars;
7102 ctx->block_vars = v;
7103 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7104 ctx->lastprivate_conditional_map->put (o, v);
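/* Illustration of what the temporaries above implement (sketch):

     #pragma omp for lastprivate (conditional: x)
     for (i = 0; i < n; i++)
       if (p[i])
	 x = i;

   The original x must receive the value from the last iteration that
   assigned it, so each thread stamps a _condtemp_ iteration counter when
   x is written and the largest stamp wins when copying back.  */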
7109 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7110 both parallel and workshare constructs. PREDICATE may be NULL if it's
7111 always true. BODY_P is the sequence to insert early initialization
7112 if needed, STMT_LIST is where the non-conditional lastprivate handling
7113 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7114 section. */
7116 static void
7117 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7118 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7119 omp_context *ctx)
7121 tree x, c, label = NULL, orig_clauses = clauses;
7122 bool par_clauses = false;
7123 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7124 unsigned HOST_WIDE_INT conditional_off = 0;
7125 gimple_seq post_stmt_list = NULL;
7127 /* Early exit if there are no lastprivate or linear clauses. */
7128 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7129 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7130 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7131 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7132 break;
7133 if (clauses == NULL)
7135 /* If this was a workshare clause, see if it had been combined
7136 with its parallel. In that case, look for the clauses on the
7137 parallel statement itself. */
7138 if (is_parallel_ctx (ctx))
7139 return;
7141 ctx = ctx->outer;
7142 if (ctx == NULL || !is_parallel_ctx (ctx))
7143 return;
7145 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7146 OMP_CLAUSE_LASTPRIVATE);
7147 if (clauses == NULL)
7148 return;
7149 par_clauses = true;
7152 bool maybe_simt = false;
7153 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7154 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7156 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7157 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7158 if (simduid)
7159 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7162 if (predicate)
7164 gcond *stmt;
7165 tree label_true, arm1, arm2;
7166 enum tree_code pred_code = TREE_CODE (predicate);
7168 label = create_artificial_label (UNKNOWN_LOCATION);
7169 label_true = create_artificial_label (UNKNOWN_LOCATION);
7170 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7172 arm1 = TREE_OPERAND (predicate, 0);
7173 arm2 = TREE_OPERAND (predicate, 1);
7174 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7175 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7177 else
7179 arm1 = predicate;
7180 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7181 arm2 = boolean_false_node;
7182 pred_code = NE_EXPR;
7184 if (maybe_simt)
7186 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7187 c = fold_convert (integer_type_node, c);
7188 simtcond = create_tmp_var (integer_type_node);
7189 gimplify_assign (simtcond, c, stmt_list);
7190 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7191 1, simtcond);
7192 c = create_tmp_var (integer_type_node);
7193 gimple_call_set_lhs (g, c);
7194 gimple_seq_add_stmt (stmt_list, g);
7195 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7196 label_true, label);
7198 else
7199 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7200 gimple_seq_add_stmt (stmt_list, stmt);
7201 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7204 tree cond_ptr = NULL_TREE;
7205 for (c = clauses; c ;)
7207 tree var, new_var;
7208 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7209 gimple_seq *this_stmt_list = stmt_list;
7210 tree lab2 = NULL_TREE;
7212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7213 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7214 && ctx->lastprivate_conditional_map
7215 && !ctx->combined_into_simd_safelen1)
7217 gcc_assert (body_p);
7218 if (simduid)
7219 goto next;
7220 if (cond_ptr == NULL_TREE)
7222 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7223 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7225 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7226 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7227 tree v = *ctx->lastprivate_conditional_map->get (o);
7228 gimplify_assign (v, build_zero_cst (type), body_p);
7229 this_stmt_list = cstmt_list;
7230 tree mem;
7231 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7233 mem = build2 (MEM_REF, type, cond_ptr,
7234 build_int_cst (TREE_TYPE (cond_ptr),
7235 conditional_off));
7236 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7238 else
7239 mem = build4 (ARRAY_REF, type, cond_ptr,
7240 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7241 tree mem2 = copy_node (mem);
7242 gimple_seq seq = NULL;
7243 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7244 gimple_seq_add_seq (this_stmt_list, seq);
7245 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7246 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7247 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7248 gimple_seq_add_stmt (this_stmt_list, g);
7249 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7250 gimplify_assign (mem2, v, this_stmt_list);
7252 else if (predicate
7253 && ctx->combined_into_simd_safelen1
7254 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7255 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7256 && ctx->lastprivate_conditional_map)
7257 this_stmt_list = &post_stmt_list;
7259 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7260 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7261 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7263 var = OMP_CLAUSE_DECL (c);
7264 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7265 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7266 && is_taskloop_ctx (ctx))
7268 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7269 new_var = lookup_decl (var, ctx->outer);
7271 else
7273 new_var = lookup_decl (var, ctx);
7274 /* Avoid uninitialized warnings for lastprivate and
7275 for linear iterators. */
7276 if (predicate
7277 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7278 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7279 suppress_warning (new_var, OPT_Wuninitialized);
7282 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7284 tree val = DECL_VALUE_EXPR (new_var);
7285 if (TREE_CODE (val) == ARRAY_REF
7286 && VAR_P (TREE_OPERAND (val, 0))
7287 && lookup_attribute ("omp simd array",
7288 DECL_ATTRIBUTES (TREE_OPERAND (val,
7289 0))))
7291 if (lastlane == NULL)
7293 lastlane = create_tmp_var (unsigned_type_node);
7294 gcall *g
7295 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7296 2, simduid,
7297 TREE_OPERAND (val, 1));
7298 gimple_call_set_lhs (g, lastlane);
7299 gimple_seq_add_stmt (this_stmt_list, g);
7301 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7302 TREE_OPERAND (val, 0), lastlane,
7303 NULL_TREE, NULL_TREE);
7304 TREE_THIS_NOTRAP (new_var) = 1;
7307 else if (maybe_simt)
7309 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7310 ? DECL_VALUE_EXPR (new_var)
7311 : new_var);
7312 if (simtlast == NULL)
7314 simtlast = create_tmp_var (unsigned_type_node);
7315 gcall *g = gimple_build_call_internal
7316 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7317 gimple_call_set_lhs (g, simtlast);
7318 gimple_seq_add_stmt (this_stmt_list, g);
7320 x = build_call_expr_internal_loc
7321 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7322 TREE_TYPE (val), 2, val, simtlast);
7323 new_var = unshare_expr (new_var);
7324 gimplify_assign (new_var, x, this_stmt_list);
7325 new_var = unshare_expr (new_var);
7328 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7329 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7331 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7332 gimple_seq_add_seq (this_stmt_list,
7333 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7334 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7336 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7337 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7339 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7340 gimple_seq_add_seq (this_stmt_list,
7341 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7342 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7345 x = NULL_TREE;
7346 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7347 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7348 && is_taskloop_ctx (ctx))
7350 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7351 ctx->outer->outer);
7352 if (is_global_var (ovar))
7353 x = ovar;
7355 if (!x)
7356 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7357 if (omp_privatize_by_reference (var))
7358 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7359 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7360 gimplify_and_add (x, this_stmt_list);
7362 if (lab2)
7363 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7366 next:
7367 c = OMP_CLAUSE_CHAIN (c);
7368 if (c == NULL && !par_clauses)
7370 /* If this was a workshare clause, see if it had been combined
7371 with its parallel. In that case, continue looking for the
7372 clauses on the parallel statement itself as well. */
7373 if (is_parallel_ctx (ctx))
7374 break;
7376 ctx = ctx->outer;
7377 if (ctx == NULL || !is_parallel_ctx (ctx))
7378 break;
7380 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7381 OMP_CLAUSE_LASTPRIVATE);
7382 par_clauses = true;
7386 if (label)
7387 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7388 gimple_seq_add_seq (stmt_list, post_stmt_list);
7391 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7392 (which might be a placeholder). INNER is true if this is an inner
7393 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7394 join markers. Generate the before-loop forking sequence in
7395 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
7396 general form of these sequences is
7398 GOACC_REDUCTION_SETUP
7399 GOACC_FORK
7400 GOACC_REDUCTION_INIT
7402 GOACC_REDUCTION_FINI
7403 GOACC_JOIN
7404 GOACC_REDUCTION_TEARDOWN. */
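/* For illustration only (a sketch of the shape, not verbatim GIMPLE;
   the variable names are hypothetical), a loop such as

     #pragma acc parallel loop reduction (+:sum)

   is lowered roughly as

     sum = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     GOACC_FORK
     sum = GOACC_REDUCTION (INIT, ref_to_res, sum, level, +, offset);
     ... loop body ...
     sum = GOACC_REDUCTION (FINI, ref_to_res, sum, level, +, offset);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, sum, level, +, offset);  */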
7406 static void
7407 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7408 gcall *fork, gcall *private_marker, gcall *join,
7409 gimple_seq *fork_seq, gimple_seq *join_seq,
7410 omp_context *ctx)
7412 gimple_seq before_fork = NULL;
7413 gimple_seq after_fork = NULL;
7414 gimple_seq before_join = NULL;
7415 gimple_seq after_join = NULL;
7416 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7417 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7418 unsigned offset = 0;
7420 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7421 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7423 /* No 'reduction' clauses on OpenACC 'kernels'. */
7424 gcc_checking_assert (!is_oacc_kernels (ctx));
7425 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7426 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7428 tree orig = OMP_CLAUSE_DECL (c);
7429 tree var = maybe_lookup_decl (orig, ctx);
7430 tree ref_to_res = NULL_TREE;
7431 tree incoming, outgoing, v1, v2, v3;
7432 bool is_private = false;
7434 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7435 if (rcode == MINUS_EXPR)
7436 rcode = PLUS_EXPR;
7437 else if (rcode == TRUTH_ANDIF_EXPR)
7438 rcode = BIT_AND_EXPR;
7439 else if (rcode == TRUTH_ORIF_EXPR)
7440 rcode = BIT_IOR_EXPR;
7441 tree op = build_int_cst (unsigned_type_node, rcode);
7443 if (!var)
7444 var = orig;
7446 incoming = outgoing = var;
7448 if (!inner)
7450 /* See if an outer construct also reduces this variable. */
7451 omp_context *outer = ctx;
7453 while (omp_context *probe = outer->outer)
7455 enum gimple_code type = gimple_code (probe->stmt);
7456 tree cls;
7458 switch (type)
7460 case GIMPLE_OMP_FOR:
7461 cls = gimple_omp_for_clauses (probe->stmt);
7462 break;
7464 case GIMPLE_OMP_TARGET:
7465 /* No 'reduction' clauses inside OpenACC 'kernels'
7466 regions. */
7467 gcc_checking_assert (!is_oacc_kernels (probe));
7469 if (!is_gimple_omp_offloaded (probe->stmt))
7470 goto do_lookup;
7472 cls = gimple_omp_target_clauses (probe->stmt);
7473 break;
7475 default:
7476 goto do_lookup;
7479 outer = probe;
7480 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7481 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7482 && orig == OMP_CLAUSE_DECL (cls))
7484 incoming = outgoing = lookup_decl (orig, probe);
7485 goto has_outer_reduction;
7487 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7488 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7489 && orig == OMP_CLAUSE_DECL (cls))
7491 is_private = true;
7492 goto do_lookup;
7496 do_lookup:
7497 /* This is the outermost construct with this reduction;
7498 see if there's a mapping for it. */
7499 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7500 && maybe_lookup_field (orig, outer) && !is_private)
7502 ref_to_res = build_receiver_ref (orig, false, outer);
7503 if (omp_privatize_by_reference (orig))
7504 ref_to_res = build_simple_mem_ref (ref_to_res);
7506 tree type = TREE_TYPE (var);
7507 if (POINTER_TYPE_P (type))
7508 type = TREE_TYPE (type);
7510 outgoing = var;
7511 incoming = omp_reduction_init_op (loc, rcode, type);
7513 else
7515 /* Look through enclosing contexts for the reduction var;
7516 use the original if no mapping is found. */
7517 tree t = NULL_TREE;
7518 omp_context *c = ctx->outer;
7519 while (c && !t)
7521 t = maybe_lookup_decl (orig, c);
7522 c = c->outer;
7524 incoming = outgoing = (t ? t : orig);
7527 has_outer_reduction:;
7530 if (!ref_to_res)
7531 ref_to_res = integer_zero_node;
7533 if (omp_privatize_by_reference (orig))
7535 tree type = TREE_TYPE (var);
7536 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7538 if (!inner)
7540 tree x = create_tmp_var (TREE_TYPE (type), id);
7541 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7544 v1 = create_tmp_var (type, id);
7545 v2 = create_tmp_var (type, id);
7546 v3 = create_tmp_var (type, id);
7548 gimplify_assign (v1, var, fork_seq);
7549 gimplify_assign (v2, var, fork_seq);
7550 gimplify_assign (v3, var, fork_seq);
7552 var = build_simple_mem_ref (var);
7553 v1 = build_simple_mem_ref (v1);
7554 v2 = build_simple_mem_ref (v2);
7555 v3 = build_simple_mem_ref (v3);
7556 outgoing = build_simple_mem_ref (outgoing);
7558 if (!TREE_CONSTANT (incoming))
7559 incoming = build_simple_mem_ref (incoming);
7561 else
7562 /* Note that 'var' might be a mem ref. */
7563 v1 = v2 = v3 = var;
7565 /* Determine the position in the reduction buffer, which may be
7566 used by the target. The parser has ensured that this is not a
7567 variable-sized type. */
7568 fixed_size_mode mode
7569 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7570 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7571 offset = (offset + align - 1) & ~(align - 1);
7572 tree off = build_int_cst (sizetype, offset);
7573 offset += GET_MODE_SIZE (mode);
7575 if (!init_code)
7577 init_code = build_int_cst (integer_type_node,
7578 IFN_GOACC_REDUCTION_INIT);
7579 fini_code = build_int_cst (integer_type_node,
7580 IFN_GOACC_REDUCTION_FINI);
7581 setup_code = build_int_cst (integer_type_node,
7582 IFN_GOACC_REDUCTION_SETUP);
7583 teardown_code = build_int_cst (integer_type_node,
7584 IFN_GOACC_REDUCTION_TEARDOWN);
7587 tree setup_call
7588 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7589 TREE_TYPE (var), 6, setup_code,
7590 unshare_expr (ref_to_res),
7591 unshare_expr (incoming),
7592 level, op, off);
7593 tree init_call
7594 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7595 TREE_TYPE (var), 6, init_code,
7596 unshare_expr (ref_to_res),
7597 unshare_expr (v1), level, op, off);
7598 tree fini_call
7599 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7600 TREE_TYPE (var), 6, fini_code,
7601 unshare_expr (ref_to_res),
7602 unshare_expr (v2), level, op, off);
7603 tree teardown_call
7604 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7605 TREE_TYPE (var), 6, teardown_code,
7606 ref_to_res, unshare_expr (v3),
7607 level, op, off);
7609 gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
7610 gimplify_assign (unshare_expr (v2), init_call, &after_fork);
7611 gimplify_assign (unshare_expr (v3), fini_call, &before_join);
7612 gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
7615 /* Now stitch things together. */
7616 gimple_seq_add_seq (fork_seq, before_fork);
7617 if (private_marker)
7618 gimple_seq_add_stmt (fork_seq, private_marker);
7619 if (fork)
7620 gimple_seq_add_stmt (fork_seq, fork);
7621 gimple_seq_add_seq (fork_seq, after_fork);
7623 gimple_seq_add_seq (join_seq, before_join);
7624 if (join)
7625 gimple_seq_add_stmt (join_seq, join);
7626 gimple_seq_add_seq (join_seq, after_join);
7629 /* Generate code to implement the REDUCTION clauses and append it
7630 to STMT_SEQP. If CLIST is non-NULL, it points to a sequence
7631 that should also be emitted inside the critical section; in
7632 that case clear *CLIST afterwards, otherwise leave it as is
7633 and let the caller emit it itself. */
7635 static void
7636 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7637 gimple_seq *clist, omp_context *ctx)
7639 gimple_seq sub_seq = NULL;
7640 gimple *stmt;
7641 tree x, c;
7642 int count = 0;
7644 /* OpenACC loop reductions are handled elsewhere. */
7645 if (is_gimple_omp_oacc (ctx->stmt))
7646 return;
7648 /* SIMD reductions are handled in lower_rec_input_clauses. */
7649 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7650 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7651 return;
7653 /* Inscan reductions are handled elsewhere. */
7654 if (ctx->scan_inclusive || ctx->scan_exclusive)
7655 return;
7657 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7658 update in that case; otherwise use a lock. */
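/* For example (a sketch assuming a scalar integer reduction; not
   verbatim GIMPLE and with hypothetical names), a lone
   '#pragma omp for reduction (+:s)' clause merges via

     #pragma omp atomic relaxed
     s_outer = s_outer + s_priv;

   whereas two or more clauses (or an array/UDR reduction) produce

     GOMP_atomic_start ();
     s_outer = s_outer + s_priv;
     t_outer = t_outer * t_priv;
     GOMP_atomic_end ();  */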
7659 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7660 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7661 && !OMP_CLAUSE_REDUCTION_TASK (c))
7663 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7664 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7666 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7667 count = -1;
7668 break;
7670 count++;
7673 if (count == 0)
7674 return;
7676 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7678 tree var, ref, new_var, orig_var;
7679 enum tree_code code;
7680 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7682 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7683 || OMP_CLAUSE_REDUCTION_TASK (c))
7684 continue;
7686 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7687 orig_var = var = OMP_CLAUSE_DECL (c);
7688 if (TREE_CODE (var) == MEM_REF)
7690 var = TREE_OPERAND (var, 0);
7691 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7692 var = TREE_OPERAND (var, 0);
7693 if (TREE_CODE (var) == ADDR_EXPR)
7694 var = TREE_OPERAND (var, 0);
7695 else
7697 /* If this is a pointer- or reference-based array
7698 section, the var could be private in the outer
7699 context, e.g. on an orphaned loop construct. Pretend
7700 this is the private variable's outer reference. */
7701 ccode = OMP_CLAUSE_PRIVATE;
7702 if (INDIRECT_REF_P (var))
7703 var = TREE_OPERAND (var, 0);
7705 orig_var = var;
7706 if (is_variable_sized (var))
7708 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7709 var = DECL_VALUE_EXPR (var);
7710 gcc_assert (INDIRECT_REF_P (var));
7711 var = TREE_OPERAND (var, 0);
7712 gcc_assert (DECL_P (var));
7715 new_var = lookup_decl (var, ctx);
7716 if (var == OMP_CLAUSE_DECL (c)
7717 && omp_privatize_by_reference (var))
7718 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7719 ref = build_outer_var_ref (var, ctx, ccode);
7720 code = OMP_CLAUSE_REDUCTION_CODE (c);
7722 /* reduction(-:var) sums up the partial results, so it acts
7723 identically to reduction(+:var). */
7724 if (code == MINUS_EXPR)
7725 code = PLUS_EXPR;
7727 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7728 if (count == 1)
7730 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7732 addr = save_expr (addr);
7733 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7734 tree new_var2 = new_var;
7735 tree ref2 = ref;
7736 if (is_truth_op)
7738 tree zero = build_zero_cst (TREE_TYPE (new_var));
7739 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7740 boolean_type_node, new_var, zero);
7741 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7742 ref, zero);
7744 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7745 new_var2);
7746 if (is_truth_op)
7747 x = fold_convert (TREE_TYPE (new_var), x);
7748 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7749 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7750 gimplify_and_add (x, stmt_seqp);
7751 return;
7753 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7755 tree d = OMP_CLAUSE_DECL (c);
7756 tree type = TREE_TYPE (d);
7757 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7758 tree i = create_tmp_var (TREE_TYPE (v));
7759 tree ptype = build_pointer_type (TREE_TYPE (type));
7760 tree bias = TREE_OPERAND (d, 1);
7761 d = TREE_OPERAND (d, 0);
7762 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7764 tree b = TREE_OPERAND (d, 1);
7765 b = maybe_lookup_decl (b, ctx);
7766 if (b == NULL)
7768 b = TREE_OPERAND (d, 1);
7769 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7771 if (integer_zerop (bias))
7772 bias = b;
7773 else
7775 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7776 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7777 TREE_TYPE (b), b, bias);
7779 d = TREE_OPERAND (d, 0);
7781 /* For ref, build_outer_var_ref already performs the
7782 dereference, so only new_var needs one. */
7783 if (INDIRECT_REF_P (d))
7785 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7786 gcc_assert (omp_privatize_by_reference (var)
7787 && var == orig_var);
7789 else if (TREE_CODE (d) == ADDR_EXPR)
7791 if (orig_var == var)
7793 new_var = build_fold_addr_expr (new_var);
7794 ref = build_fold_addr_expr (ref);
7797 else
7799 gcc_assert (orig_var == var);
7800 if (omp_privatize_by_reference (var))
7801 ref = build_fold_addr_expr (ref);
7803 if (DECL_P (v))
7805 tree t = maybe_lookup_decl (v, ctx);
7806 if (t)
7807 v = t;
7808 else
7809 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7810 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7812 if (!integer_zerop (bias))
7814 bias = fold_convert_loc (clause_loc, sizetype, bias);
7815 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7816 TREE_TYPE (new_var), new_var,
7817 unshare_expr (bias));
7818 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7819 TREE_TYPE (ref), ref, bias);
7821 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7822 ref = fold_convert_loc (clause_loc, ptype, ref);
7823 tree m = create_tmp_var (ptype);
7824 gimplify_assign (m, new_var, stmt_seqp);
7825 new_var = m;
7826 m = create_tmp_var (ptype);
7827 gimplify_assign (m, ref, stmt_seqp);
7828 ref = m;
7829 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7830 tree body = create_artificial_label (UNKNOWN_LOCATION);
7831 tree end = create_artificial_label (UNKNOWN_LOCATION);
7832 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7833 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7834 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7835 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7837 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7838 tree decl_placeholder
7839 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7840 SET_DECL_VALUE_EXPR (placeholder, out);
7841 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7842 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7843 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7844 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7845 gimple_seq_add_seq (&sub_seq,
7846 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7847 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7848 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7849 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7851 else
7853 tree out2 = out;
7854 tree priv2 = priv;
7855 if (is_truth_op)
7857 tree zero = build_zero_cst (TREE_TYPE (out));
7858 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7859 boolean_type_node, out, zero);
7860 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7861 boolean_type_node, priv, zero);
7863 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7864 if (is_truth_op)
7865 x = fold_convert (TREE_TYPE (out), x);
7866 out = unshare_expr (out);
7867 gimplify_assign (out, x, &sub_seq);
7869 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7870 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7871 gimple_seq_add_stmt (&sub_seq, g);
7872 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7873 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7874 gimple_seq_add_stmt (&sub_seq, g);
7875 g = gimple_build_assign (i, PLUS_EXPR, i,
7876 build_int_cst (TREE_TYPE (i), 1));
7877 gimple_seq_add_stmt (&sub_seq, g);
7878 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7879 gimple_seq_add_stmt (&sub_seq, g);
7880 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7882 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7884 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7886 if (omp_privatize_by_reference (var)
7887 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7888 TREE_TYPE (ref)))
7889 ref = build_fold_addr_expr_loc (clause_loc, ref);
7890 SET_DECL_VALUE_EXPR (placeholder, ref);
7891 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7892 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7893 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7894 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7895 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7897 else
7899 tree new_var2 = new_var;
7900 tree ref2 = ref;
7901 if (is_truth_op)
7903 tree zero = build_zero_cst (TREE_TYPE (new_var));
7904 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7905 boolean_type_node, new_var, zero);
7906 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7907 ref, zero);
7909 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7910 if (is_truth_op)
7911 x = fold_convert (TREE_TYPE (new_var), x);
7912 ref = build_outer_var_ref (var, ctx);
7913 gimplify_assign (ref, x, &sub_seq);
7917 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7919 gimple_seq_add_stmt (stmt_seqp, stmt);
7921 gimple_seq_add_seq (stmt_seqp, sub_seq);
7923 if (clist)
7925 gimple_seq_add_seq (stmt_seqp, *clist);
7926 *clist = NULL;
7929 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7931 gimple_seq_add_stmt (stmt_seqp, stmt);
7935 /* Generate code to implement the COPYPRIVATE clauses. */
7937 static void
7938 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7939 omp_context *ctx)
7941 tree c;
7943 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7945 tree var, new_var, ref, x;
7946 bool by_ref;
7947 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7949 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7950 continue;
7952 var = OMP_CLAUSE_DECL (c);
7953 by_ref = use_pointer_for_field (var, NULL);
7955 ref = build_sender_ref (var, ctx);
7956 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7957 if (by_ref)
7959 x = build_fold_addr_expr_loc (clause_loc, new_var);
7960 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7962 gimplify_assign (ref, x, slist);
7964 ref = build_receiver_ref (var, false, ctx);
7965 if (by_ref)
7967 ref = fold_convert_loc (clause_loc,
7968 build_pointer_type (TREE_TYPE (new_var)),
7969 ref);
7970 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7972 if (omp_privatize_by_reference (var))
7974 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7975 ref = build_simple_mem_ref_loc (clause_loc, ref);
7976 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7978 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7979 gimplify_and_add (x, rlist);
7984 /* Generate code to implement the FIRSTPRIVATE, COPYIN, LASTPRIVATE
7985 and REDUCTION clauses from the sender (aka parent) side. */
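/* As a sketch (record and variable names hypothetical), for
   '#pragma omp task firstprivate (x)' the sender stores into the
   marshalling record before the runtime call,

     .omp_data_o.x = x;     (or .omp_data_o.x = &x when passed by
                             reference)

   and a lastprivate copy-out appended to OLIST reads it back:

     x = .omp_data_o.x;  */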
7987 static void
7988 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7989 omp_context *ctx)
7991 tree c, t;
7992 int ignored_looptemp = 0;
7993 bool is_taskloop = false;
7995 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7996 initialized by GOMP_taskloop. */
7997 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7999 ignored_looptemp = 2;
8000 is_taskloop = true;
8003 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8005 tree val, ref, x, var;
8006 bool by_ref, do_in = false, do_out = false;
8007 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8009 switch (OMP_CLAUSE_CODE (c))
8011 case OMP_CLAUSE_PRIVATE:
8012 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8013 break;
8014 continue;
8015 case OMP_CLAUSE_FIRSTPRIVATE:
8016 case OMP_CLAUSE_COPYIN:
8017 case OMP_CLAUSE_LASTPRIVATE:
8018 case OMP_CLAUSE_IN_REDUCTION:
8019 case OMP_CLAUSE__REDUCTEMP_:
8020 break;
8021 case OMP_CLAUSE_REDUCTION:
8022 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
8023 continue;
8024 break;
8025 case OMP_CLAUSE_SHARED:
8026 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8027 break;
8028 continue;
8029 case OMP_CLAUSE__LOOPTEMP_:
8030 if (ignored_looptemp)
8032 ignored_looptemp--;
8033 continue;
8035 break;
8036 default:
8037 continue;
8040 val = OMP_CLAUSE_DECL (c);
8041 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8042 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8043 && TREE_CODE (val) == MEM_REF)
8045 val = TREE_OPERAND (val, 0);
8046 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8047 val = TREE_OPERAND (val, 0);
8048 if (INDIRECT_REF_P (val)
8049 || TREE_CODE (val) == ADDR_EXPR)
8050 val = TREE_OPERAND (val, 0);
8051 if (is_variable_sized (val))
8052 continue;
8055 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8056 outer taskloop region. */
8057 omp_context *ctx_for_o = ctx;
8058 if (is_taskloop
8059 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8060 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8061 ctx_for_o = ctx->outer;
8063 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8065 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8066 && is_global_var (var)
8067 && (val == OMP_CLAUSE_DECL (c)
8068 || !is_task_ctx (ctx)
8069 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8070 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8071 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8072 != POINTER_TYPE)))))
8073 continue;
8075 t = omp_member_access_dummy_var (var);
8076 if (t)
8078 var = DECL_VALUE_EXPR (var);
8079 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8080 if (o != t)
8081 var = unshare_and_remap (var, t, o);
8082 else
8083 var = unshare_expr (var);
8086 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8088 /* Handle taskloop firstprivate/lastprivate, where the
8089 lastprivate on GIMPLE_OMP_TASK is represented as
8090 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8091 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8092 x = omp_build_component_ref (ctx->sender_decl, f);
8093 if (use_pointer_for_field (val, ctx))
8094 var = build_fold_addr_expr (var);
8095 gimplify_assign (x, var, ilist);
8096 DECL_ABSTRACT_ORIGIN (f) = NULL;
8097 continue;
8100 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8101 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8102 || val == OMP_CLAUSE_DECL (c))
8103 && is_variable_sized (val))
8104 continue;
8105 by_ref = use_pointer_for_field (val, NULL);
8107 switch (OMP_CLAUSE_CODE (c))
8109 case OMP_CLAUSE_FIRSTPRIVATE:
8110 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8111 && !by_ref
8112 && is_task_ctx (ctx))
8113 suppress_warning (var);
8114 do_in = true;
8115 break;
8117 case OMP_CLAUSE_PRIVATE:
8118 case OMP_CLAUSE_COPYIN:
8119 case OMP_CLAUSE__LOOPTEMP_:
8120 case OMP_CLAUSE__REDUCTEMP_:
8121 do_in = true;
8122 break;
8124 case OMP_CLAUSE_LASTPRIVATE:
8125 if (by_ref || omp_privatize_by_reference (val))
8127 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8128 continue;
8129 do_in = true;
8131 else
8133 do_out = true;
8134 if (lang_hooks.decls.omp_private_outer_ref (val))
8135 do_in = true;
8137 break;
8139 case OMP_CLAUSE_REDUCTION:
8140 case OMP_CLAUSE_IN_REDUCTION:
8141 do_in = true;
8142 if (val == OMP_CLAUSE_DECL (c))
8144 if (is_task_ctx (ctx))
8145 by_ref = use_pointer_for_field (val, ctx);
8146 else
8147 do_out = !(by_ref || omp_privatize_by_reference (val));
8149 else
8150 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8151 break;
8153 default:
8154 gcc_unreachable ();
8157 if (do_in)
8159 ref = build_sender_ref (val, ctx);
8160 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8161 gimplify_assign (ref, x, ilist);
8162 if (is_task_ctx (ctx))
8163 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8166 if (do_out)
8168 ref = build_sender_ref (val, ctx);
8169 gimplify_assign (var, ref, olist);
8174 /* Generate code to implement SHARED from the sender (aka parent)
8175 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8176 list things that got automatically shared. */
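/* E.g. (a sketch with hypothetical names), a shared variable 'a'
   accessed by address gets

     .omp_data_o.a = &a;

   emitted into ILIST, while a by-value field 'b' is copied in via
   ILIST and, unless read-only, copied back out via OLIST:

     .omp_data_o.b = b;   ...   b = .omp_data_o.b;  */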
8178 static void
8179 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8181 tree var, ovar, nvar, t, f, x, record_type;
8183 if (ctx->record_type == NULL)
8184 return;
8186 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8187 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8189 ovar = DECL_ABSTRACT_ORIGIN (f);
8190 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8191 continue;
8193 nvar = maybe_lookup_decl (ovar, ctx);
8194 if (!nvar
8195 || !DECL_HAS_VALUE_EXPR_P (nvar)
8196 || (ctx->allocate_map
8197 && ctx->allocate_map->get (ovar)))
8198 continue;
8200 /* If CTX is a nested parallel directive, find the immediately
8201 enclosing parallel or workshare construct that contains a
8202 mapping for OVAR. */
8203 var = lookup_decl_in_outer_ctx (ovar, ctx);
8205 t = omp_member_access_dummy_var (var);
8206 if (t)
8208 var = DECL_VALUE_EXPR (var);
8209 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8210 if (o != t)
8211 var = unshare_and_remap (var, t, o);
8212 else
8213 var = unshare_expr (var);
8216 if (use_pointer_for_field (ovar, ctx))
8218 x = build_sender_ref (ovar, ctx);
8219 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8220 && TREE_TYPE (f) == TREE_TYPE (ovar))
8222 gcc_assert (is_parallel_ctx (ctx)
8223 && DECL_ARTIFICIAL (ovar));
8224 /* _condtemp_ clause. */
8225 var = build_constructor (TREE_TYPE (x), NULL);
8227 else
8228 var = build_fold_addr_expr (var);
8229 gimplify_assign (x, var, ilist);
8231 else
8233 x = build_sender_ref (ovar, ctx);
8234 gimplify_assign (x, var, ilist);
8236 if (!TREE_READONLY (var)
8237 /* We don't need to receive a new reference to a result
8238 or parm decl. In fact we must not store to it, as that
8239 would invalidate any pending RSO and generate wrong gimple
8240 during inlining. */
8241 && !((TREE_CODE (var) == RESULT_DECL
8242 || TREE_CODE (var) == PARM_DECL)
8243 && DECL_BY_REFERENCE (var)))
8245 x = build_sender_ref (ovar, ctx);
8246 gimplify_assign (var, x, olist);
8252 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8253 other information that must be processed by the target compiler.
8254 Return the maximum number of dimensions the associated loop might
8255 be partitioned over. */
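/* The marker itself is an IFN_UNIQUE internal call; a sketch of its
   shape (operand spelling approximate) for a 'gang worker' loop is

     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, levels=2,
                         tag=OLF_DIM_GANG|OLF_DIM_WORKER);

   with an optional trailing gang_static operand when 'gang (static:N)'
   was specified.  */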
8257 static unsigned
8258 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8259 gimple_seq *seq, omp_context *ctx)
8261 unsigned levels = 0;
8262 unsigned tag = 0;
8263 tree gang_static = NULL_TREE;
8264 auto_vec<tree, 5> args;
8266 args.quick_push (build_int_cst
8267 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8268 args.quick_push (ddvar);
8269 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8271 switch (OMP_CLAUSE_CODE (c))
8273 case OMP_CLAUSE_GANG:
8274 tag |= OLF_DIM_GANG;
8275 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8276 /* static:* is represented by -1, and we can ignore it, as
8277 scheduling is always static. */
8278 if (gang_static && integer_minus_onep (gang_static))
8279 gang_static = NULL_TREE;
8280 levels++;
8281 break;
8283 case OMP_CLAUSE_WORKER:
8284 tag |= OLF_DIM_WORKER;
8285 levels++;
8286 break;
8288 case OMP_CLAUSE_VECTOR:
8289 tag |= OLF_DIM_VECTOR;
8290 levels++;
8291 break;
8293 case OMP_CLAUSE_SEQ:
8294 tag |= OLF_SEQ;
8295 break;
8297 case OMP_CLAUSE_AUTO:
8298 tag |= OLF_AUTO;
8299 break;
8301 case OMP_CLAUSE_INDEPENDENT:
8302 tag |= OLF_INDEPENDENT;
8303 break;
8305 case OMP_CLAUSE_TILE:
8306 tag |= OLF_TILE;
8307 break;
8309 case OMP_CLAUSE_REDUCTION:
8310 tag |= OLF_REDUCTION;
8311 break;
8313 default:
8314 continue;
8318 if (gang_static)
8320 if (DECL_P (gang_static))
8321 gang_static = build_outer_var_ref (gang_static, ctx);
8322 tag |= OLF_GANG_STATIC;
8325 omp_context *tgt = enclosing_target_ctx (ctx);
8326 if (!tgt || is_oacc_parallel_or_serial (tgt))
8328 else if (is_oacc_kernels (tgt))
8329 /* This loop handling is not used inside OpenACC 'kernels' regions. */
8330 gcc_unreachable ();
8331 else if (is_oacc_kernels_decomposed_part (tgt))
8333 else
8334 gcc_unreachable ();
8336 /* In a parallel region, loops are implicitly INDEPENDENT. */
8337 if (!tgt || is_oacc_parallel_or_serial (tgt))
8338 tag |= OLF_INDEPENDENT;
8340 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8341 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8342 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8344 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8345 gcc_assert (!(tag & OLF_AUTO));
8348 if (tag & OLF_TILE)
8349 /* Tiling could use all 3 levels. */
8350 levels = 3;
8351 else
8353 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8354 Ensure at least one level, or two for possible auto
8355 partitioning. */
8356 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8357 << OLF_DIM_BASE) | OLF_SEQ));
8359 if (levels < 1u + maybe_auto)
8360 levels = 1u + maybe_auto;
8363 args.quick_push (build_int_cst (integer_type_node, levels));
8364 args.quick_push (build_int_cst (integer_type_node, tag));
8365 if (gang_static)
8366 args.quick_push (gang_static);
8368 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8369 gimple_set_location (call, loc);
8370 gimple_set_lhs (call, ddvar);
8371 gimple_seq_add_stmt (seq, call);
8373 return levels;
8376 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
8377 non-NULL, is the partitioning level of the enclosed region. */
8379 static void
8380 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8381 tree tofollow, gimple_seq *seq)
8383 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8384 : IFN_UNIQUE_OACC_TAIL_MARK);
8385 tree marker = build_int_cst (integer_type_node, marker_kind);
8386 int nargs = 2 + (tofollow != NULL_TREE);
8387 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8388 marker, ddvar, tofollow);
8389 gimple_set_location (call, loc);
8390 gimple_set_lhs (call, ddvar);
8391 gimple_seq_add_stmt (seq, call);
8394 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8395 the loop clauses, from which we extract reductions. Initialize
8396 HEAD and TAIL. */
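/* For a two-level loop (e.g. gang + vector) the result nests, roughly
   (a sketch; marker operands abbreviated):

     HEAD:  head-mark (2)  fork  head-mark (1)  fork
     TAIL:  join  tail-mark (1)  join  tail-mark (2)

   with each level's reduction setup/init and fini/teardown sequences
   placed around its fork and join by lower_oacc_reductions.  */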
8398 static void
8399 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8400 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8402 bool inner = false;
8403 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8404 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8406 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8408 if (private_marker)
8410 gimple_set_location (private_marker, loc);
8411 gimple_call_set_lhs (private_marker, ddvar);
8412 gimple_call_set_arg (private_marker, 1, ddvar);
8415 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8416 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8418 gcc_assert (count);
8419 for (unsigned done = 1; count; count--, done++)
8421 gimple_seq fork_seq = NULL;
8422 gimple_seq join_seq = NULL;
8424 tree place = build_int_cst (integer_type_node, -1);
8425 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8426 fork_kind, ddvar, place);
8427 gimple_set_location (fork, loc);
8428 gimple_set_lhs (fork, ddvar);
8430 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8431 join_kind, ddvar, place);
8432 gimple_set_location (join, loc);
8433 gimple_set_lhs (join, ddvar);
8435 /* Mark the beginning of this level sequence. */
8436 if (inner)
8437 lower_oacc_loop_marker (loc, ddvar, true,
8438 build_int_cst (integer_type_node, count),
8439 &fork_seq);
8440 lower_oacc_loop_marker (loc, ddvar, false,
8441 build_int_cst (integer_type_node, done),
8442 &join_seq);
8444 lower_oacc_reductions (loc, clauses, place, inner,
8445 fork, (count == 1) ? private_marker : NULL,
8446 join, &fork_seq, &join_seq, ctx);
8448 /* Append this level to head. */
8449 gimple_seq_add_seq (head, fork_seq);
8450 /* Prepend it to tail. */
8451 gimple_seq_add_seq (&join_seq, *tail);
8452 *tail = join_seq;
8454 inner = true;
8457 /* Mark the end of the sequence. */
8458 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8459 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8462 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8463 catch handler and return it. This prevents programs from violating the
8464 structured block semantics with throws. */
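/* Conceptually (a C++-like sketch, not GIMPLE):

     try { BODY }
     catch (...) { terminate (); }    (or __builtin_trap () when the
                                       language has no cleanup action)

   i.e. a GIMPLE_TRY_CATCH whose handler is an EH_MUST_NOT_THROW
   wrapping the cleanup action.  */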
8466 static gimple_seq
8467 maybe_catch_exception (gimple_seq body)
8469 gimple *g;
8470 tree decl;
8472 if (!flag_exceptions)
8473 return body;
8475 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8476 decl = lang_hooks.eh_protect_cleanup_actions ();
8477 else
8478 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8480 g = gimple_build_eh_must_not_throw (decl);
8481 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8482 GIMPLE_TRY_CATCH);
8484 return gimple_seq_alloc_with_stmt (g);
8488 /* Routines to lower OMP directives into OMP-GIMPLE. */
8490 /* If ctx is a worksharing context inside of a cancellable parallel
8491 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8492 and conditional branch to parallel's cancel_label to handle
8493 cancellation in the implicit barrier. */
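/* A sketch of the emitted shape (label names hypothetical):

     lhs = GIMPLE_OMP_RETURN;            implicit cancellable barrier
     if (lhs != false) goto cancel_label; else goto fallthru_label;
     fallthru_label:;  */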
8495 static void
8496 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8497 gimple_seq *body)
8499 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8500 if (gimple_omp_return_nowait_p (omp_return))
8501 return;
8502 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8503 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8504 && outer->cancellable)
8506 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8507 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8508 tree lhs = create_tmp_var (c_bool_type);
8509 gimple_omp_return_set_lhs (omp_return, lhs);
8510 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8511 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8512 fold_convert (c_bool_type,
8513 boolean_false_node),
8514 outer->cancel_label, fallthru_label);
8515 gimple_seq_add_stmt (body, g);
8516 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8518 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8519 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8520 return;
8523 /* Find the first task_reduction or reduction clause or return NULL
8524 if there are none. */
8526 static inline tree
8527 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8528 enum omp_clause_code ccode)
8530 while (1)
8532 clauses = omp_find_clause (clauses, ccode);
8533 if (clauses == NULL_TREE)
8534 return NULL_TREE;
8535 if (ccode != OMP_CLAUSE_REDUCTION
8536 || code == OMP_TASKLOOP
8537 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8538 return clauses;
8539 clauses = OMP_CLAUSE_CHAIN (clauses);
8543 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8544 gimple_seq *, gimple_seq *);
8546 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8547 CTX is the enclosing OMP context for the current statement. */
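/* After lowering, the replacement body is laid out roughly as
   (a sketch of the statement order assembled below):

     <ilist: privatization and reduction setup>
     GIMPLE_OMP_SECTIONS <.section control variable>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind: the lowered GIMPLE_OMP_SECTION bodies>
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist, dlist: reduction merges and destructors>
     GIMPLE_OMP_RETURN [nowait]  */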
8549 static void
8550 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8552 tree block, control;
8553 gimple_stmt_iterator tgsi;
8554 gomp_sections *stmt;
8555 gimple *t;
8556 gbind *new_stmt, *bind;
8557 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8559 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8561 push_gimplify_context ();
8563 dlist = NULL;
8564 ilist = NULL;
8566 tree rclauses
8567 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8568 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8569 tree rtmp = NULL_TREE;
8570 if (rclauses)
8572 tree type = build_pointer_type (pointer_sized_int_node);
8573 tree temp = create_tmp_var (type);
8574 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8575 OMP_CLAUSE_DECL (c) = temp;
8576 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8577 gimple_omp_sections_set_clauses (stmt, c);
8578 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8579 gimple_omp_sections_clauses (stmt),
8580 &ilist, &tred_dlist);
8581 rclauses = c;
8582 rtmp = make_ssa_name (type);
8583 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8586 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8587 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8589 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8590 &ilist, &dlist, ctx, NULL);
8592 control = create_tmp_var (unsigned_type_node, ".section");
8593 gimple_omp_sections_set_control (stmt, control);
8595 new_body = gimple_omp_body (stmt);
8596 gimple_omp_set_body (stmt, NULL);
8597 tgsi = gsi_start (new_body);
8598 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8600 omp_context *sctx;
8601 gimple *sec_start;
8603 sec_start = gsi_stmt (tgsi);
8604 sctx = maybe_lookup_ctx (sec_start);
8605 gcc_assert (sctx);
8607 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8608 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8609 GSI_CONTINUE_LINKING);
8610 gimple_omp_set_body (sec_start, NULL);
8612 if (gsi_one_before_end_p (tgsi))
8614 gimple_seq l = NULL;
8615 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8616 &ilist, &l, &clist, ctx);
8617 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8618 gimple_omp_section_set_last (sec_start);
8621 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8622 GSI_CONTINUE_LINKING);
8625 block = make_node (BLOCK);
8626 bind = gimple_build_bind (NULL, new_body, block);
8628 olist = NULL;
8629 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8630 &clist, ctx);
8631 if (clist)
8633 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8634 gcall *g = gimple_build_call (fndecl, 0);
8635 gimple_seq_add_stmt (&olist, g);
8636 gimple_seq_add_seq (&olist, clist);
8637 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8638 g = gimple_build_call (fndecl, 0);
8639 gimple_seq_add_stmt (&olist, g);
8642 block = make_node (BLOCK);
8643 new_stmt = gimple_build_bind (NULL, NULL, block);
8644 gsi_replace (gsi_p, new_stmt, true);
8646 pop_gimplify_context (new_stmt);
8647 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8648 BLOCK_VARS (block) = gimple_bind_vars (bind);
8649 if (BLOCK_VARS (block))
8650 TREE_USED (block) = 1;
8652 new_body = NULL;
8653 gimple_seq_add_seq (&new_body, ilist);
8654 gimple_seq_add_stmt (&new_body, stmt);
8655 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8656 gimple_seq_add_stmt (&new_body, bind);
8658 t = gimple_build_omp_continue (control, control);
8659 gimple_seq_add_stmt (&new_body, t);
8661 gimple_seq_add_seq (&new_body, olist);
8662 if (ctx->cancellable)
8663 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8664 gimple_seq_add_seq (&new_body, dlist);
8666 new_body = maybe_catch_exception (new_body);
8668 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8669 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8670 t = gimple_build_omp_return (nowait);
8671 gimple_seq_add_stmt (&new_body, t);
8672 gimple_seq_add_seq (&new_body, tred_dlist);
8673 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8675 if (rclauses)
8676 OMP_CLAUSE_DECL (rclauses) = rtmp;
8678 gimple_bind_set_body (new_stmt, new_body);
8682 /* A subroutine of lower_omp_single. Expand the simple form of
8683 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8685 if (GOMP_single_start ())
8686 BODY;
8687 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8689 FIXME. It may be better to delay expanding the logic of this until
8690 pass_expand_omp. The expanded logic may make the job more difficult
8691 for a synchronization analysis pass. */
8693 static void
8694 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8696 location_t loc = gimple_location (single_stmt);
8697 tree tlabel = create_artificial_label (loc);
8698 tree flabel = create_artificial_label (loc);
8699 gimple *call, *cond;
8700 tree lhs, decl;
8702 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8703 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8704 call = gimple_build_call (decl, 0);
8705 gimple_call_set_lhs (call, lhs);
8706 gimple_seq_add_stmt (pre_p, call);
8708 cond = gimple_build_cond (EQ_EXPR, lhs,
8709 fold_convert_loc (loc, TREE_TYPE (lhs),
8710 boolean_true_node),
8711 tlabel, flabel);
8712 gimple_seq_add_stmt (pre_p, cond);
8713 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8714 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8715 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8719 /* A subroutine of lower_omp_single. Expand the simple form of
8720 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8722 #pragma omp single copyprivate (a, b, c)
8724 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8727 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8729 BODY;
8730 copyout.a = a;
8731 copyout.b = b;
8732 copyout.c = c;
8733 GOMP_single_copy_end (&copyout);
8735 else
8737 a = copyout_p->a;
8738 b = copyout_p->b;
8739 c = copyout_p->c;
8741 GOMP_barrier ();
8744 FIXME. It may be better to delay expanding the logic of this until
8745 pass_expand_omp. The expanded logic may make the job more difficult
8746 for a synchronization analysis pass. */
8748 static void
8749 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8750 omp_context *ctx)
8752 tree ptr_type, t, l0, l1, l2, bfn_decl;
8753 gimple_seq copyin_seq;
8754 location_t loc = gimple_location (single_stmt);
8756 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8758 ptr_type = build_pointer_type (ctx->record_type);
8759 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8761 l0 = create_artificial_label (loc);
8762 l1 = create_artificial_label (loc);
8763 l2 = create_artificial_label (loc);
8765 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8766 t = build_call_expr_loc (loc, bfn_decl, 0);
8767 t = fold_convert_loc (loc, ptr_type, t);
8768 gimplify_assign (ctx->receiver_decl, t, pre_p);
8770 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8771 build_int_cst (ptr_type, 0));
8772 t = build3 (COND_EXPR, void_type_node, t,
8773 build_and_jump (&l0), build_and_jump (&l1));
8774 gimplify_and_add (t, pre_p);
8776 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8778 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8780 copyin_seq = NULL;
8781 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8782 &copyin_seq, ctx);
8784 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8785 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8786 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8787 gimplify_and_add (t, pre_p);
8789 t = build_and_jump (&l2);
8790 gimplify_and_add (t, pre_p);
8792 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8794 gimple_seq_add_seq (pre_p, copyin_seq);
8796 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8800 /* Expand code for an OpenMP single directive. */
8802 static void
8803 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8805 tree block;
8806 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8807 gbind *bind;
8808 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8810 push_gimplify_context ();
8812 block = make_node (BLOCK);
8813 bind = gimple_build_bind (NULL, NULL, block);
8814 gsi_replace (gsi_p, bind, true);
8815 bind_body = NULL;
8816 dlist = NULL;
8817 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8818 &bind_body, &dlist, ctx, NULL);
8819 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8821 gimple_seq_add_stmt (&bind_body, single_stmt);
8823 if (ctx->record_type)
8824 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8825 else
8826 lower_omp_single_simple (single_stmt, &bind_body);
8828 gimple_omp_set_body (single_stmt, NULL);
8830 gimple_seq_add_seq (&bind_body, dlist);
8832 bind_body = maybe_catch_exception (bind_body);
8834 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8835 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8836 gimple *g = gimple_build_omp_return (nowait);
8837 gimple_seq_add_stmt (&bind_body_tail, g);
8838 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8839 if (ctx->record_type)
8841 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8842 tree clobber = build_clobber (ctx->record_type);
8843 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8844 clobber), GSI_SAME_STMT);
8846 gimple_seq_add_seq (&bind_body, bind_body_tail);
8847 gimple_bind_set_body (bind, bind_body);
8849 pop_gimplify_context (bind);
8851 gimple_bind_append_vars (bind, ctx->block_vars);
8852 BLOCK_VARS (block) = ctx->block_vars;
8853 if (BLOCK_VARS (block))
8854 TREE_USED (block) = 1;
8858 /* Lower code for an OMP scope directive. */
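/* A sketch: when task reductions are present, the lowered body begins
   with a registration call on a fresh _reductemp_ (name hypothetical),

     GOMP_scope_start (.omp_reductemp);

   followed by the privatized body, the reduction merges, and a final
   GIMPLE_OMP_RETURN (an implicit barrier unless 'nowait').  */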
8860 static void
8861 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8863 tree block;
8864 gimple *scope_stmt = gsi_stmt (*gsi_p);
8865 gbind *bind;
8866 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8867 gimple_seq tred_dlist = NULL;
8869 push_gimplify_context ();
8871 block = make_node (BLOCK);
8872 bind = gimple_build_bind (NULL, NULL, block);
8873 gsi_replace (gsi_p, bind, true);
8874 bind_body = NULL;
8875 dlist = NULL;
8877 tree rclauses
8878 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8879 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8880 if (rclauses)
8882 tree type = build_pointer_type (pointer_sized_int_node);
8883 tree temp = create_tmp_var (type);
8884 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8885 OMP_CLAUSE_DECL (c) = temp;
8886 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8887 gimple_omp_scope_set_clauses (scope_stmt, c);
8888 lower_omp_task_reductions (ctx, OMP_SCOPE,
8889 gimple_omp_scope_clauses (scope_stmt),
8890 &bind_body, &tred_dlist);
8891 rclauses = c;
8892 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8893 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8894 gimple_seq_add_stmt (&bind_body, stmt);
8897 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8898 &bind_body, &dlist, ctx, NULL);
8899 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8901 gimple_seq_add_stmt (&bind_body, scope_stmt);
8903 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8905 gimple_omp_set_body (scope_stmt, NULL);
8907 gimple_seq clist = NULL;
8908 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8909 &bind_body, &clist, ctx);
8910 if (clist)
8912 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8913 gcall *g = gimple_build_call (fndecl, 0);
8914 gimple_seq_add_stmt (&bind_body, g);
8915 gimple_seq_add_seq (&bind_body, clist);
8916 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8917 g = gimple_build_call (fndecl, 0);
8918 gimple_seq_add_stmt (&bind_body, g);
8921 gimple_seq_add_seq (&bind_body, dlist);
8923 bind_body = maybe_catch_exception (bind_body);
8925 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8926 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8927 gimple *g = gimple_build_omp_return (nowait);
8928 gimple_seq_add_stmt (&bind_body_tail, g);
8929 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8930 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8931 if (ctx->record_type)
8933 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8934 tree clobber = build_clobber (ctx->record_type);
8935 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8936 clobber), GSI_SAME_STMT);
8938 gimple_seq_add_seq (&bind_body, bind_body_tail);
8940 gimple_bind_set_body (bind, bind_body);
8942 pop_gimplify_context (bind);
8944 gimple_bind_append_vars (bind, ctx->block_vars);
8945 BLOCK_VARS (block) = ctx->block_vars;
8946 if (BLOCK_VARS (block))
8947 TREE_USED (block) = 1;
8949 /* Expand code for an OpenMP master or masked directive. */
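/* A sketch of the expansion (not verbatim GIMPLE; FILTER is 0 for
   'master' and for 'masked' without a filter clause):

     if (omp_get_thread_num () == FILTER)
       BODY;

   threads whose number differs from FILTER branch past the body to an
   artificial label.  */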
8951 static void
8952 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8954 tree block, lab = NULL, x, bfn_decl;
8955 gimple *stmt = gsi_stmt (*gsi_p);
8956 gbind *bind;
8957 location_t loc = gimple_location (stmt);
8958 gimple_seq tseq;
8959 tree filter = integer_zero_node;
8961 push_gimplify_context ();
8963 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8965 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8966 OMP_CLAUSE_FILTER);
8967 if (filter)
8968 filter = fold_convert (integer_type_node,
8969 OMP_CLAUSE_FILTER_EXPR (filter));
8970 else
8971 filter = integer_zero_node;
8973 block = make_node (BLOCK);
8974 bind = gimple_build_bind (NULL, NULL, block);
8975 gsi_replace (gsi_p, bind, true);
8976 gimple_bind_add_stmt (bind, stmt);
8978 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8979 x = build_call_expr_loc (loc, bfn_decl, 0);
8980 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8981 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8982 tseq = NULL;
8983 gimplify_and_add (x, &tseq);
8984 gimple_bind_add_seq (bind, tseq);
8986 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8987 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8988 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8989 gimple_omp_set_body (stmt, NULL);
8991 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8993 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8995 pop_gimplify_context (bind);
8997 gimple_bind_append_vars (bind, ctx->block_vars);
8998 BLOCK_VARS (block) = ctx->block_vars;
9001 /* Helper function for lower_omp_task_reductions. For a specific PASS
9002 find the next clause that should be processed, or return false
9003 if all have been processed already. */
9005 static inline bool
9006 omp_task_reduction_iterate (int pass, enum tree_code code,
9007 enum omp_clause_code ccode, tree *c, tree *decl,
9008 tree *type, tree *next)
9010 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
9012 if (ccode == OMP_CLAUSE_REDUCTION
9013 && code != OMP_TASKLOOP
9014 && !OMP_CLAUSE_REDUCTION_TASK (*c))
9015 continue;
9016 *decl = OMP_CLAUSE_DECL (*c);
9017 *type = TREE_TYPE (*decl);
9018 if (TREE_CODE (*decl) == MEM_REF)
9020 if (pass != 1)
9021 continue;
9023 else
9025 if (omp_privatize_by_reference (*decl))
9026 *type = TREE_TYPE (*type);
9027 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9028 continue;
9030 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9031 return true;
9033 *decl = NULL_TREE;
9034 *type = NULL_TREE;
9035 *next = NULL_TREE;
9036 return false;
9039 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9040 OMP_TASKGROUP, and only those with the task modifier). Register their
9041 mappings in the START sequence; reduce and unregister them in the END sequence. */
9043 static void
9044 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9045 gimple_seq *start, gimple_seq *end)
9047 enum omp_clause_code ccode
9048 = (code == OMP_TASKGROUP
9049 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9050 tree cancellable = NULL_TREE;
9051 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9052 if (clauses == NULL_TREE)
9053 return;
9054 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9056 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9057 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9058 && outer->cancellable)
9060 cancellable = error_mark_node;
9061 break;
9063 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9064 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9065 break;
9067 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9068 tree *last = &TYPE_FIELDS (record_type);
9069 unsigned cnt = 0;
9070 if (cancellable)
9072 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9073 ptr_type_node);
9074 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9075 integer_type_node);
9076 *last = field;
9077 DECL_CHAIN (field) = ifield;
9078 last = &DECL_CHAIN (ifield);
9079 DECL_CONTEXT (field) = record_type;
9080 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9081 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9082 DECL_CONTEXT (ifield) = record_type;
9083 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9084 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9086 for (int pass = 0; pass < 2; pass++)
9088 tree decl, type, next;
9089 for (tree c = clauses;
9090 omp_task_reduction_iterate (pass, code, ccode,
9091 &c, &decl, &type, &next); c = next)
9093 ++cnt;
9094 tree new_type = type;
9095 if (ctx->outer)
9096 new_type = remap_type (type, &ctx->outer->cb);
9097 tree field
9098 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9099 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9100 new_type);
9101 if (DECL_P (decl) && type == TREE_TYPE (decl))
9103 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9104 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9105 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9107 else
9108 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9109 DECL_CONTEXT (field) = record_type;
9110 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9111 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9112 *last = field;
9113 last = &DECL_CHAIN (field);
9114 tree bfield
9115 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9116 boolean_type_node);
9117 DECL_CONTEXT (bfield) = record_type;
9118 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9119 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9120 *last = bfield;
9121 last = &DECL_CHAIN (bfield);
9124 *last = NULL_TREE;
9125 layout_type (record_type);
9127 /* Build up an array which registers with the runtime all the reductions
9128 and deregisters them at the end. Format documented in libgomp/task.c. */
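/* A sketch of the array head as initialized by the assignments below
   (the authoritative layout is in libgomp/task.c):

     a[0] = cnt                        number of reductions
     a[1] = per-thread chunk size, rounded up to a cache line
     a[2] = MAX (alignment, cache line size)
     a[3] = -1, a[4] = 0               filled in/used by the runtime

   followed by three words per reduction.  */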
9129 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9130 tree avar = create_tmp_var_raw (atype);
9131 gimple_add_tmp_var (avar);
9132 TREE_ADDRESSABLE (avar) = 1;
9133 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9134 NULL_TREE, NULL_TREE);
9135 tree t = build_int_cst (pointer_sized_int_node, cnt);
9136 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9137 gimple_seq seq = NULL;
9138 tree sz = fold_convert (pointer_sized_int_node,
9139 TYPE_SIZE_UNIT (record_type));
9140 int cachesz = 64;
9141 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9142 build_int_cst (pointer_sized_int_node, cachesz - 1));
9143 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9144 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9145 ctx->task_reductions.create (1 + cnt);
9146 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9147 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9148 ? sz : NULL_TREE);
9149 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9150 gimple_seq_add_seq (start, seq);
9151 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9152 NULL_TREE, NULL_TREE);
9153 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9154 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9155 NULL_TREE, NULL_TREE);
9156 t = build_int_cst (pointer_sized_int_node,
9157 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9158 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9159 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9160 NULL_TREE, NULL_TREE);
9161 t = build_int_cst (pointer_sized_int_node, -1);
9162 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9163 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9164 NULL_TREE, NULL_TREE);
9165 t = build_int_cst (pointer_sized_int_node, 0);
9166 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9168 /* In END, build a loop that iterates from 0 to omp_get_num_threads () - 1
9169 and for each task reduction checks a bool right after the private variable
9170 within that thread's chunk; if the bool is clear, the variable hasn't been
9171 initialized and thus isn't going to be reduced or destructed; otherwise,
9172 reduce and destruct it. */
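/* A C-like sketch of that END loop (names hypothetical):

     for (idx = 0; idx < num_thr_sz; idx++)
       for each reduction var r in the chunk of thread idx:
         if (r.initialized_flag)
           { outer_r = merge (outer_r, r); destruct (r); }  */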
9173 tree idx = create_tmp_var (size_type_node);
9174 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9175 tree num_thr_sz = create_tmp_var (size_type_node);
9176 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9177 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9178 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9179 gimple *g;
9180 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9182 /* For worksharing constructs or scope, only perform it in the master
9183 thread, except for cancelled implicit barriers, where only the
9184 current thread is handled. */
9185 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9186 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9187 tree thr_num = create_tmp_var (integer_type_node);
9188 g = gimple_build_call (t, 0);
9189 gimple_call_set_lhs (g, thr_num);
9190 gimple_seq_add_stmt (end, g);
9191 if (cancellable)
9193 tree c;
9194 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9195 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9196 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9197 if (code == OMP_FOR)
9198 c = gimple_omp_for_clauses (ctx->stmt);
9199 else if (code == OMP_SECTIONS)
9200 c = gimple_omp_sections_clauses (ctx->stmt);
9201 else /* if (code == OMP_SCOPE) */
9202 c = gimple_omp_scope_clauses (ctx->stmt);
9203 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9204 cancellable = c;
9205 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9206 lab5, lab6);
9207 gimple_seq_add_stmt (end, g);
9208 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9209 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9210 gimple_seq_add_stmt (end, g);
9211 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9212 build_one_cst (TREE_TYPE (idx)));
9213 gimple_seq_add_stmt (end, g);
9214 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9215 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9217 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9218 gimple_seq_add_stmt (end, g);
9219 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9221 if (code != OMP_PARALLEL)
9223 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9224 tree num_thr = create_tmp_var (integer_type_node);
9225 g = gimple_build_call (t, 0);
9226 gimple_call_set_lhs (g, num_thr);
9227 gimple_seq_add_stmt (end, g);
9228 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9229 gimple_seq_add_stmt (end, g);
9230 if (cancellable)
9231 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9233 else
9235 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9236 OMP_CLAUSE__REDUCTEMP_);
9237 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9238 t = fold_convert (size_type_node, t);
9239 gimplify_assign (num_thr_sz, t, end);
9241 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9242 NULL_TREE, NULL_TREE);
9243 tree data = create_tmp_var (pointer_sized_int_node);
9244 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9245 if (code == OMP_TASKLOOP)
9247 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9248 g = gimple_build_cond (NE_EXPR, data,
9249 build_zero_cst (pointer_sized_int_node),
9250 lab1, lab7);
9251 gimple_seq_add_stmt (end, g);
9253 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9254 tree ptr;
9255 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9256 ptr = create_tmp_var (build_pointer_type (record_type));
9257 else
9258 ptr = create_tmp_var (ptr_type_node);
9259 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9261 tree field = TYPE_FIELDS (record_type);
9262 cnt = 0;
9263 if (cancellable)
9264 field = DECL_CHAIN (DECL_CHAIN (field));
9265 for (int pass = 0; pass < 2; pass++)
9267 tree decl, type, next;
9268 for (tree c = clauses;
9269 omp_task_reduction_iterate (pass, code, ccode,
9270 &c, &decl, &type, &next); c = next)
9272 tree var = decl, ref;
9273 if (TREE_CODE (decl) == MEM_REF)
9275 var = TREE_OPERAND (var, 0);
9276 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9277 var = TREE_OPERAND (var, 0);
9278 tree v = var;
9279 if (TREE_CODE (var) == ADDR_EXPR)
9280 var = TREE_OPERAND (var, 0);
9281 else if (INDIRECT_REF_P (var))
9282 var = TREE_OPERAND (var, 0);
9283 tree orig_var = var;
9284 if (is_variable_sized (var))
9286 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9287 var = DECL_VALUE_EXPR (var);
9288 gcc_assert (INDIRECT_REF_P (var));
9289 var = TREE_OPERAND (var, 0);
9290 gcc_assert (DECL_P (var));
9292 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9293 if (orig_var != var)
9294 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9295 else if (TREE_CODE (v) == ADDR_EXPR)
9296 t = build_fold_addr_expr (t);
9297 else if (INDIRECT_REF_P (v))
9298 t = build_fold_indirect_ref (t);
9299 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9301 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9302 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9303 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9305 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9306 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9307 fold_convert (size_type_node,
9308 TREE_OPERAND (decl, 1)));
9310 else
9312 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9313 if (!omp_privatize_by_reference (decl))
9314 t = build_fold_addr_expr (t);
9316 t = fold_convert (pointer_sized_int_node, t);
9317 seq = NULL;
9318 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9319 gimple_seq_add_seq (start, seq);
9320 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9321 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9322 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9323 t = unshare_expr (byte_position (field));
9324 t = fold_convert (pointer_sized_int_node, t);
9325 ctx->task_reduction_map->put (c, cnt);
9326 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9327 ? t : NULL_TREE);
9328 seq = NULL;
9329 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9330 gimple_seq_add_seq (start, seq);
9331 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9332 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9333 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9335 tree bfield = DECL_CHAIN (field);
9336 tree cond;
9337 if (code == OMP_PARALLEL
9338 || code == OMP_FOR
9339 || code == OMP_SECTIONS
9340 || code == OMP_SCOPE)
9341 /* In parallel, worksharing or scope all threads unconditionally
9342 initialize all their task reduction private variables. */
9343 cond = boolean_true_node;
9344 else if (TREE_TYPE (ptr) == ptr_type_node)
9346 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9347 unshare_expr (byte_position (bfield)));
9348 seq = NULL;
9349 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9350 gimple_seq_add_seq (end, seq);
9351 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9352 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9353 build_int_cst (pbool, 0));
9355 else
9356 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9357 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9358 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9359 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9360 tree condv = create_tmp_var (boolean_type_node);
9361 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9362 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9363 lab3, lab4);
9364 gimple_seq_add_stmt (end, g);
9365 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9366 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9368 /* If this reduction doesn't need destruction and parallel
9369 has been cancelled, there is nothing to do for this
9370 reduction, so jump around the merge operation. */
9371 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9372 g = gimple_build_cond (NE_EXPR, cancellable,
9373 build_zero_cst (TREE_TYPE (cancellable)),
9374 lab4, lab5);
9375 gimple_seq_add_stmt (end, g);
9376 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9379 tree new_var;
9380 if (TREE_TYPE (ptr) == ptr_type_node)
9382 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9383 unshare_expr (byte_position (field)));
9384 seq = NULL;
9385 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9386 gimple_seq_add_seq (end, seq);
9387 tree pbool = build_pointer_type (TREE_TYPE (field));
9388 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9389 build_int_cst (pbool, 0));
9391 else
9392 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9393 build_simple_mem_ref (ptr), field, NULL_TREE);
9395 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9396 if (TREE_CODE (decl) != MEM_REF
9397 && omp_privatize_by_reference (decl))
9398 ref = build_simple_mem_ref (ref);
9399 /* reduction(-:var) sums up the partial results, so it acts
9400 identically to reduction(+:var). */
9401 if (rcode == MINUS_EXPR)
9402 rcode = PLUS_EXPR;
9403 if (TREE_CODE (decl) == MEM_REF)
9405 tree type = TREE_TYPE (new_var);
9406 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9407 tree i = create_tmp_var (TREE_TYPE (v));
9408 tree ptype = build_pointer_type (TREE_TYPE (type));
9409 if (DECL_P (v))
9411 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9412 tree vv = create_tmp_var (TREE_TYPE (v));
9413 gimplify_assign (vv, v, start);
9414 v = vv;
9416 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9417 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9418 new_var = build_fold_addr_expr (new_var);
9419 new_var = fold_convert (ptype, new_var);
9420 ref = fold_convert (ptype, ref);
9421 tree m = create_tmp_var (ptype);
9422 gimplify_assign (m, new_var, end);
9423 new_var = m;
9424 m = create_tmp_var (ptype);
9425 gimplify_assign (m, ref, end);
9426 ref = m;
9427 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9428 tree body = create_artificial_label (UNKNOWN_LOCATION);
9429 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9430 gimple_seq_add_stmt (end, gimple_build_label (body));
9431 tree priv = build_simple_mem_ref (new_var);
9432 tree out = build_simple_mem_ref (ref);
9433 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9435 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9436 tree decl_placeholder
9437 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9438 tree lab6 = NULL_TREE;
9439 if (cancellable)
9441 /* If this reduction needs destruction and parallel
9442 has been cancelled, jump around the merge operation
9443 to the destruction. */
9444 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9445 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9446 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9447 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9448 lab6, lab5);
9449 gimple_seq_add_stmt (end, g);
9450 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9452 SET_DECL_VALUE_EXPR (placeholder, out);
9453 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9454 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9455 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9456 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9457 gimple_seq_add_seq (end,
9458 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9459 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9460 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9462 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9463 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9465 if (cancellable)
9466 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9467 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9468 if (x)
9470 gimple_seq tseq = NULL;
9471 gimplify_stmt (&x, &tseq);
9472 gimple_seq_add_seq (end, tseq);
9475 else
9477 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9478 out = unshare_expr (out);
9479 gimplify_assign (out, x, end);
9481 gimple *g
9482 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9483 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9484 gimple_seq_add_stmt (end, g);
9485 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9486 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9487 gimple_seq_add_stmt (end, g);
9488 g = gimple_build_assign (i, PLUS_EXPR, i,
9489 build_int_cst (TREE_TYPE (i), 1));
9490 gimple_seq_add_stmt (end, g);
9491 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9492 gimple_seq_add_stmt (end, g);
9493 gimple_seq_add_stmt (end, gimple_build_label (endl));
9495 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9497 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9498 tree oldv = NULL_TREE;
9499 tree lab6 = NULL_TREE;
9500 if (cancellable)
9502 /* If this reduction needs destruction and parallel
9503 has been cancelled, jump around the merge operation
9504 to the destruction. */
9505 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9506 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9507 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9508 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9509 lab6, lab5);
9510 gimple_seq_add_stmt (end, g);
9511 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9513 if (omp_privatize_by_reference (decl)
9514 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9515 TREE_TYPE (ref)))
9516 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9517 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9518 tree refv = create_tmp_var (TREE_TYPE (ref));
9519 gimplify_assign (refv, ref, end);
9520 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9521 SET_DECL_VALUE_EXPR (placeholder, ref);
9522 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9523 tree d = maybe_lookup_decl (decl, ctx);
9524 gcc_assert (d);
9525 if (DECL_HAS_VALUE_EXPR_P (d))
9526 oldv = DECL_VALUE_EXPR (d);
9527 if (omp_privatize_by_reference (var))
9529 tree v = fold_convert (TREE_TYPE (d),
9530 build_fold_addr_expr (new_var));
9531 SET_DECL_VALUE_EXPR (d, v);
9533 else
9534 SET_DECL_VALUE_EXPR (d, new_var);
9535 DECL_HAS_VALUE_EXPR_P (d) = 1;
9536 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9537 if (oldv)
9538 SET_DECL_VALUE_EXPR (d, oldv);
9539 else
9541 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9542 DECL_HAS_VALUE_EXPR_P (d) = 0;
9544 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9545 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9546 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9547 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9548 if (cancellable)
9549 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9550 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9551 if (x)
9553 gimple_seq tseq = NULL;
9554 gimplify_stmt (&x, &tseq);
9555 gimple_seq_add_seq (end, tseq);
9558 else
9560 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9561 ref = unshare_expr (ref);
9562 gimplify_assign (ref, x, end);
9564 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9565 ++cnt;
9566 field = DECL_CHAIN (bfield);
9570 if (code == OMP_TASKGROUP)
9572 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9573 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9574 gimple_seq_add_stmt (start, g);
9576 else
9578 tree c;
9579 if (code == OMP_FOR)
9580 c = gimple_omp_for_clauses (ctx->stmt);
9581 else if (code == OMP_SECTIONS)
9582 c = gimple_omp_sections_clauses (ctx->stmt);
9583 else if (code == OMP_SCOPE)
9584 c = gimple_omp_scope_clauses (ctx->stmt);
9585 else
9586 c = gimple_omp_taskreg_clauses (ctx->stmt);
9587 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9588 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9589 build_fold_addr_expr (avar));
9590 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9593 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9594 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9595 size_one_node));
9596 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9597 gimple_seq_add_stmt (end, g);
9598 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9599 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9601 enum built_in_function bfn
9602 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9603 t = builtin_decl_explicit (bfn);
9604 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9605 tree arg;
9606 if (cancellable)
9608 arg = create_tmp_var (c_bool_type);
9609 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9610 cancellable));
9612 else
9613 arg = build_int_cst (c_bool_type, 0);
9614 g = gimple_build_call (t, 1, arg);
9616 else
9618 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9619 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9621 gimple_seq_add_stmt (end, g);
9622 if (lab7)
9623 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9624 t = build_constructor (atype, NULL);
9625 TREE_THIS_VOLATILE (t) = 1;
9626 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9629 /* Expand code for an OpenMP taskgroup directive. */
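/* The resulting bind is roughly (a sketch; the reduction parts appear
   only when task reduction clauses are present):

     GOMP_taskgroup_start ();
     <task reduction registration, added by lower_omp_task_reductions>
     <lowered taskgroup body>
     <task reduction unregistration sequence (DSEQ)>  */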
9631 static void
9632 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9634 gimple *stmt = gsi_stmt (*gsi_p);
9635 gcall *x;
9636 gbind *bind;
9637 gimple_seq dseq = NULL;
9638 tree block = make_node (BLOCK);
9640 bind = gimple_build_bind (NULL, NULL, block);
9641 gsi_replace (gsi_p, bind, true);
9642 gimple_bind_add_stmt (bind, stmt);
9644 push_gimplify_context ();
9646 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9647 0);
9648 gimple_bind_add_stmt (bind, x);
9650 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9651 gimple_omp_taskgroup_clauses (stmt),
9652 gimple_bind_body_ptr (bind), &dseq);
9654 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9655 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9656 gimple_omp_set_body (stmt, NULL);
9658 gimple_bind_add_seq (bind, dseq);
9660 pop_gimplify_context (bind);
9662 gimple_bind_append_vars (bind, ctx->block_vars);
9663 BLOCK_VARS (block) = ctx->block_vars;
9667 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9669 static void
9670 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9671 omp_context *ctx)
9673 struct omp_for_data fd;
9674 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9675 return;
9677 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9678 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9679 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9680 if (!fd.ordered)
9681 return;
9683 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9684 tree c = gimple_omp_ordered_clauses (ord_stmt);
9685 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
9686 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
9688 /* Merge depend clauses from multiple adjacent
9689 #pragma omp ordered depend(sink:...) constructs
9690 into one #pragma omp ordered depend(sink:...), so that
9691 we can optimize them together. */
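/* For instance (a hypothetical illustration):

     #pragma omp ordered depend(sink: i-1)
     #pragma omp ordered depend(sink: i-2)

   is merged into the single construct

     #pragma omp ordered depend(sink: i-1) depend(sink: i-2)

   which the folding code below can then canonicalize.  */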
9692 gimple_stmt_iterator gsi = *gsi_p;
9693 gsi_next (&gsi);
9694 while (!gsi_end_p (gsi))
9696 gimple *stmt = gsi_stmt (gsi);
9697 if (is_gimple_debug (stmt)
9698 || gimple_code (stmt) == GIMPLE_NOP)
9700 gsi_next (&gsi);
9701 continue;
9703 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9704 break;
9705 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9706 c = gimple_omp_ordered_clauses (ord_stmt2);
9707 if (c == NULL_TREE
9708 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
9709 || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9710 break;
9711 while (*list_p)
9712 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9713 *list_p = c;
9714 gsi_remove (&gsi, true);
9718 /* Canonicalize sink dependence clauses into one folded clause if
9719 possible.
9721 The basic algorithm is to create a sink vector whose first
9722 element is the GCD of all the first elements, and whose remaining
9723 elements are the minimum of the subsequent columns.
9725 We ignore dependence vectors whose first element is zero because
9726 such dependencies are known to be executed by the same thread.
9728 We take into account the direction of the loop, so a minimum
9729 becomes a maximum if the loop is iterating forwards. We also
9730 ignore sink clauses where the loop direction is unknown, or where
9731 the offsets are clearly invalid because they are not a multiple
9732 of the loop increment.
9734 For example:
9736 #pragma omp for ordered(2)
9737 for (i=0; i < N; ++i)
9738 for (j=0; j < M; ++j)
9740 #pragma omp ordered \
9741 depend(sink:i-8,j-2) \
9742 depend(sink:i,j-1) \ // Completely ignored because i+0.
9743 depend(sink:i-4,j-3) \
9744 depend(sink:i-6,j-4)
9745 #pragma omp ordered depend(source)
9748 Folded clause is:
9750 depend(sink:-gcd(8,4,6),-min(2,3,4))
9751 -or-
9752 depend(sink:-2,-2)
9755 /* FIXME: Computing GCDs where the first element is zero is
9756 non-trivial in the presence of collapsed loops. Do this later. */
9757 if (fd.collapse > 1)
9758 return;
9760 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9762 /* wide_int is not a POD so it must be default-constructed. */
9763 for (unsigned i = 0; i != 2 * len - 1; ++i)
9764 new (static_cast<void*>(folded_deps + i)) wide_int ();
9766 tree folded_dep = NULL_TREE;
9767 /* TRUE if the first dimension's offset is negative. */
9768 bool neg_offset_p = false;
9770 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9771 unsigned int i;
9772 while ((c = *list_p) != NULL)
9774 bool remove = false;
9776 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
9777 if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9778 goto next_ordered_clause;
9780 tree vec;
9781 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9782 vec && TREE_CODE (vec) == TREE_LIST;
9783 vec = TREE_CHAIN (vec), ++i)
9785 gcc_assert (i < len);
9787 /* omp_extract_for_data has canonicalized the condition. */
9788 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9789 || fd.loops[i].cond_code == GT_EXPR);
9790 bool forward = fd.loops[i].cond_code == LT_EXPR;
9791 bool maybe_lexically_later = true;
9793 /* While the committee makes up its mind, bail if we have any
9794 non-constant steps. */
9795 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9796 goto lower_omp_ordered_ret;
9798 tree itype = TREE_TYPE (TREE_VALUE (vec));
9799 if (POINTER_TYPE_P (itype))
9800 itype = sizetype;
9801 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9802 TYPE_PRECISION (itype),
9803 TYPE_SIGN (itype));
9805 /* Ignore invalid offsets that are not multiples of the step. */
9806 if (!wi::multiple_of_p (wi::abs (offset),
9807 wi::abs (wi::to_wide (fd.loops[i].step)),
9808 UNSIGNED))
9810 warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
9811 "ignoring %<sink%> clause with offset that is not "
9812 "a multiple of the loop step");
9813 remove = true;
9814 goto next_ordered_clause;
9817 /* Calculate the first dimension. The first dimension of
9818 the folded dependency vector is the GCD of the first
9819 elements, while ignoring any first elements whose offset
9820 is 0. */
9821 if (i == 0)
9823 /* Ignore dependence vectors whose first dimension is 0. */
9824 if (offset == 0)
9826 remove = true;
9827 goto next_ordered_clause;
9829 else
9831 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9833 error_at (OMP_CLAUSE_LOCATION (c),
9834 "first offset must be in opposite direction "
9835 "of loop iterations");
9836 goto lower_omp_ordered_ret;
9838 if (forward)
9839 offset = -offset;
9840 neg_offset_p = forward;
9841 /* Initialize the first time around. */
9842 if (folded_dep == NULL_TREE)
9844 folded_dep = c;
9845 folded_deps[0] = offset;
9847 else
9848 folded_deps[0] = wi::gcd (folded_deps[0],
9849 offset, UNSIGNED);
9852 /* Calculate minimum for the remaining dimensions. */
9853 else
9855 folded_deps[len + i - 1] = offset;
9856 if (folded_dep == c)
9857 folded_deps[i] = offset;
9858 else if (maybe_lexically_later
9859 && !wi::eq_p (folded_deps[i], offset))
9861 if (forward ^ wi::gts_p (folded_deps[i], offset))
9863 unsigned int j;
9864 folded_dep = c;
9865 for (j = 1; j <= i; j++)
9866 folded_deps[j] = folded_deps[len + j - 1];
9868 else
9869 maybe_lexically_later = false;
9873 gcc_assert (i == len);
9875 remove = true;
9877 next_ordered_clause:
9878 if (remove)
9879 *list_p = OMP_CLAUSE_CHAIN (c);
9880 else
9881 list_p = &OMP_CLAUSE_CHAIN (c);
9884 if (folded_dep)
9886 if (neg_offset_p)
9887 folded_deps[0] = -folded_deps[0];
9889 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9890 if (POINTER_TYPE_P (itype))
9891 itype = sizetype;
9893 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9894 = wide_int_to_tree (itype, folded_deps[0]);
9895 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9896 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9899 lower_omp_ordered_ret:
9901 /* Ordered without clauses is '#pragma omp ordered threads', while we want
9902 a nop instead if we remove all clauses. */
9903 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9904 gsi_replace (gsi_p, gimple_build_nop (), true);
9908 /* Expand code for an OpenMP ordered directive. */
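/* In the plain (non-simd) case the region is lowered roughly to

     GOMP_ordered_start ();
     <lowered body>
     GOMP_ordered_end ();

   For simd the calls are replaced by the internal functions
   GOMP_SIMD_ORDERED_START/GOMP_SIMD_ORDERED_END, and under SIMT an
   additional per-lane dispatch loop is built around the body.  This is
   a summary of the code below, not literal output.  */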
9910 static void
9911 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9913 tree block;
9914 gimple *stmt = gsi_stmt (*gsi_p), *g;
9915 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9916 gcall *x;
9917 gbind *bind;
9918 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9919 OMP_CLAUSE_SIMD);
9920 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9921 loop. */
9922 bool maybe_simt
9923 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9924 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9925 OMP_CLAUSE_THREADS);
9927 if (gimple_omp_ordered_standalone_p (ord_stmt))
9929 /* FIXME: This needs to be moved to the expansion to verify various
9930 conditions only testable on cfg with dominators computed, and also
9931 all the depend clauses to be merged still might need to be available
9932 for the runtime checks. */
9933 if (0)
9934 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9935 return;
9938 push_gimplify_context ();
9940 block = make_node (BLOCK);
9941 bind = gimple_build_bind (NULL, NULL, block);
9942 gsi_replace (gsi_p, bind, true);
9943 gimple_bind_add_stmt (bind, stmt);
9945 if (simd)
9947 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9948 build_int_cst (NULL_TREE, threads));
9949 cfun->has_simduid_loops = true;
9951 else
9952 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9953 0);
9954 gimple_bind_add_stmt (bind, x);
9956 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9957 if (maybe_simt)
9959 counter = create_tmp_var (integer_type_node);
9960 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9961 gimple_call_set_lhs (g, counter);
9962 gimple_bind_add_stmt (bind, g);
9964 body = create_artificial_label (UNKNOWN_LOCATION);
9965 test = create_artificial_label (UNKNOWN_LOCATION);
9966 gimple_bind_add_stmt (bind, gimple_build_label (body));
9968 tree simt_pred = create_tmp_var (integer_type_node);
9969 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9970 gimple_call_set_lhs (g, simt_pred);
9971 gimple_bind_add_stmt (bind, g);
9973 tree t = create_artificial_label (UNKNOWN_LOCATION);
9974 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9975 gimple_bind_add_stmt (bind, g);
9977 gimple_bind_add_stmt (bind, gimple_build_label (t));
9979 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9980 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9981 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9982 gimple_omp_set_body (stmt, NULL);
9984 if (maybe_simt)
9986 gimple_bind_add_stmt (bind, gimple_build_label (test));
9987 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9988 gimple_bind_add_stmt (bind, g);
9990 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9991 tree nonneg = create_tmp_var (integer_type_node);
9992 gimple_seq tseq = NULL;
9993 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9994 gimple_bind_add_seq (bind, tseq);
9996 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9997 gimple_call_set_lhs (g, nonneg);
9998 gimple_bind_add_stmt (bind, g);
10000 tree end = create_artificial_label (UNKNOWN_LOCATION);
10001 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
10002 gimple_bind_add_stmt (bind, g);
10004 gimple_bind_add_stmt (bind, gimple_build_label (end));
10006 if (simd)
10007 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
10008 build_int_cst (NULL_TREE, threads));
10009 else
10010 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
10011 0);
10012 gimple_bind_add_stmt (bind, x);
10014 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10016 pop_gimplify_context (bind);
10018 gimple_bind_append_vars (bind, ctx->block_vars);
10019 BLOCK_VARS (block) = gimple_bind_vars (bind);
10023 /* Expand code for an OpenMP scan directive and the structured block
10024 before the scan directive. */
10026 static void
10027 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10029 gimple *stmt = gsi_stmt (*gsi_p);
10030 bool has_clauses
10031 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10032 tree lane = NULL_TREE;
10033 gimple_seq before = NULL;
10034 omp_context *octx = ctx->outer;
10035 gcc_assert (octx);
10036 if (octx->scan_exclusive && !has_clauses)
10038 gimple_stmt_iterator gsi2 = *gsi_p;
10039 gsi_next (&gsi2);
10040 gimple *stmt2 = gsi_stmt (gsi2);
10041 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10042 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10043 the one with exclusive clause(s), comes first. */
10044 if (stmt2
10045 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10046 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10048 gsi_remove (gsi_p, false);
10049 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10050 ctx = maybe_lookup_ctx (stmt2);
10051 gcc_assert (ctx);
10052 lower_omp_scan (gsi_p, ctx);
10053 return;
10057 bool input_phase = has_clauses ^ octx->scan_inclusive;
10058 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10059 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10060 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10061 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10062 && !gimple_omp_for_combined_p (octx->stmt));
10063 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10064 if (is_for_simd && octx->for_simd_scan_phase)
10065 is_simd = false;
10066 if (is_simd)
10067 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10068 OMP_CLAUSE__SIMDUID_))
10070 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10071 lane = create_tmp_var (unsigned_type_node);
10072 tree t = build_int_cst (integer_type_node,
10073 input_phase ? 1
10074 : octx->scan_inclusive ? 2 : 3);
10075 gimple *g
10076 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10077 gimple_call_set_lhs (g, lane);
10078 gimple_seq_add_stmt (&before, g);
10081 if (is_simd || is_for)
10083 for (tree c = gimple_omp_for_clauses (octx->stmt);
10084 c; c = OMP_CLAUSE_CHAIN (c))
10085 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10086 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10088 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10089 tree var = OMP_CLAUSE_DECL (c);
10090 tree new_var = lookup_decl (var, octx);
10091 tree val = new_var;
10092 tree var2 = NULL_TREE;
10093 tree var3 = NULL_TREE;
10094 tree var4 = NULL_TREE;
10095 tree lane0 = NULL_TREE;
10096 tree new_vard = new_var;
10097 if (omp_privatize_by_reference (var))
10099 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10100 val = new_var;
10102 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10104 val = DECL_VALUE_EXPR (new_vard);
10105 if (new_vard != new_var)
10107 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10108 val = TREE_OPERAND (val, 0);
10110 if (TREE_CODE (val) == ARRAY_REF
10111 && VAR_P (TREE_OPERAND (val, 0)))
10113 tree v = TREE_OPERAND (val, 0);
10114 if (lookup_attribute ("omp simd array",
10115 DECL_ATTRIBUTES (v)))
10117 val = unshare_expr (val);
10118 lane0 = TREE_OPERAND (val, 1);
10119 TREE_OPERAND (val, 1) = lane;
10120 var2 = lookup_decl (v, octx);
10121 if (octx->scan_exclusive)
10122 var4 = lookup_decl (var2, octx);
10123 if (input_phase
10124 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10125 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10126 if (!input_phase)
10128 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10129 var2, lane, NULL_TREE, NULL_TREE);
10130 TREE_THIS_NOTRAP (var2) = 1;
10131 if (octx->scan_exclusive)
10133 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10134 var4, lane, NULL_TREE,
10135 NULL_TREE);
10136 TREE_THIS_NOTRAP (var4) = 1;
10139 else
10140 var2 = val;
10143 gcc_assert (var2);
10145 else
10147 var2 = build_outer_var_ref (var, octx);
10148 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10150 var3 = maybe_lookup_decl (new_vard, octx);
10151 if (var3 == new_vard || var3 == NULL_TREE)
10152 var3 = NULL_TREE;
10153 else if (is_simd && octx->scan_exclusive && !input_phase)
10155 var4 = maybe_lookup_decl (var3, octx);
10156 if (var4 == var3 || var4 == NULL_TREE)
10158 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10160 var4 = var3;
10161 var3 = NULL_TREE;
10163 else
10164 var4 = NULL_TREE;
10168 if (is_simd
10169 && octx->scan_exclusive
10170 && !input_phase
10171 && var4 == NULL_TREE)
10172 var4 = create_tmp_var (TREE_TYPE (val));
10174 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10176 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10177 if (input_phase)
10179 if (var3)
10181 /* If we've added a separate identity element
10182 variable, copy it over into val. */
10183 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10184 var3);
10185 gimplify_and_add (x, &before);
10187 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10189 /* Otherwise, assign to it the identity element. */
10190 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10191 if (is_for)
10192 tseq = copy_gimple_seq_and_replace_locals (tseq);
10193 tree ref = build_outer_var_ref (var, octx);
10194 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10195 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10196 if (x)
10198 if (new_vard != new_var)
10199 val = build_fold_addr_expr_loc (clause_loc, val);
10200 SET_DECL_VALUE_EXPR (new_vard, val);
10202 SET_DECL_VALUE_EXPR (placeholder, ref);
10203 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10204 lower_omp (&tseq, octx);
10205 if (x)
10206 SET_DECL_VALUE_EXPR (new_vard, x);
10207 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10208 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10209 gimple_seq_add_seq (&before, tseq);
10210 if (is_simd)
10211 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10214 else if (is_simd)
10216 tree x;
10217 if (octx->scan_exclusive)
10219 tree v4 = unshare_expr (var4);
10220 tree v2 = unshare_expr (var2);
10221 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10222 gimplify_and_add (x, &before);
10224 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10225 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10226 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10227 tree vexpr = val;
10228 if (x && new_vard != new_var)
10229 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10230 if (x)
10231 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10232 SET_DECL_VALUE_EXPR (placeholder, var2);
10233 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10234 lower_omp (&tseq, octx);
10235 gimple_seq_add_seq (&before, tseq);
10236 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10237 if (x)
10238 SET_DECL_VALUE_EXPR (new_vard, x);
10239 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10240 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10241 if (octx->scan_inclusive)
10243 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10244 var2);
10245 gimplify_and_add (x, &before);
10247 else if (lane0 == NULL_TREE)
10249 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10250 var4);
10251 gimplify_and_add (x, &before);
10255 else
10257 if (input_phase)
10259 /* Input phase. Set val to the initializer before
10260 the body. */
10261 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10262 gimplify_assign (val, x, &before);
10264 else if (is_simd)
10266 /* Scan phase. */
10267 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10268 if (code == MINUS_EXPR)
10269 code = PLUS_EXPR;
10271 tree x = build2 (code, TREE_TYPE (var2),
10272 unshare_expr (var2), unshare_expr (val));
10273 if (octx->scan_inclusive)
10275 gimplify_assign (unshare_expr (var2), x, &before);
10276 gimplify_assign (val, var2, &before);
10278 else
10280 gimplify_assign (unshare_expr (var4),
10281 unshare_expr (var2), &before);
10282 gimplify_assign (var2, x, &before);
10283 if (lane0 == NULL_TREE)
10284 gimplify_assign (val, var4, &before);
10288 if (octx->scan_exclusive && !input_phase && lane0)
10290 tree vexpr = unshare_expr (var4);
10291 TREE_OPERAND (vexpr, 1) = lane0;
10292 if (new_vard != new_var)
10293 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10294 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10298 if (is_simd && !is_for_simd)
10300 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10301 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10302 gsi_replace (gsi_p, gimple_build_nop (), true);
10303 return;
10305 lower_omp (gimple_omp_body_ptr (stmt), octx);
10306 if (before)
10308 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10309 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10314 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10315 substitution of a couple of function calls. The NAMED case, however,
10316 requires that languages coordinate a symbol name, so it is
10317 best put here in common code. */
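/* Sketch: '#pragma omp critical' becomes

     GOMP_critical_start ();  <body>  GOMP_critical_end ();

   while '#pragma omp critical (foo)' becomes

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   where .gomp_critical_user_foo is the common symbol built below, shared
   by all translation units using the same name.  */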
10319 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10321 static void
10322 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10324 tree block;
10325 tree name, lock, unlock;
10326 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10327 gbind *bind;
10328 location_t loc = gimple_location (stmt);
10329 gimple_seq tbody;
10331 name = gimple_omp_critical_name (stmt);
10332 if (name)
10334 tree decl;
10336 if (!critical_name_mutexes)
10337 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10339 tree *n = critical_name_mutexes->get (name);
10340 if (n == NULL)
10342 char *new_str;
10344 decl = create_tmp_var_raw (ptr_type_node);
10346 new_str = ACONCAT ((".gomp_critical_user_",
10347 IDENTIFIER_POINTER (name), NULL));
10348 DECL_NAME (decl) = get_identifier (new_str);
10349 TREE_PUBLIC (decl) = 1;
10350 TREE_STATIC (decl) = 1;
10351 DECL_COMMON (decl) = 1;
10352 DECL_ARTIFICIAL (decl) = 1;
10353 DECL_IGNORED_P (decl) = 1;
10355 varpool_node::finalize_decl (decl);
10357 critical_name_mutexes->put (name, decl);
10359 else
10360 decl = *n;
10362 /* If '#pragma omp critical' is inside an offloaded region or
10363 inside a function marked as offloadable, the symbol must be
10364 marked as offloadable too. */
10365 omp_context *octx;
10366 if (cgraph_node::get (current_function_decl)->offloadable)
10367 varpool_node::get_create (decl)->offloadable = 1;
10368 else
10369 for (octx = ctx->outer; octx; octx = octx->outer)
10370 if (is_gimple_omp_offloaded (octx->stmt))
10372 varpool_node::get_create (decl)->offloadable = 1;
10373 break;
10376 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10377 lock = build_call_expr_loc (loc, lock, 1,
10378 build_fold_addr_expr_loc (loc, decl));
10380 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10381 unlock = build_call_expr_loc (loc, unlock, 1,
10382 build_fold_addr_expr_loc (loc, decl));
10384 else
10386 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10387 lock = build_call_expr_loc (loc, lock, 0);
10389 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10390 unlock = build_call_expr_loc (loc, unlock, 0);
10393 push_gimplify_context ();
10395 block = make_node (BLOCK);
10396 bind = gimple_build_bind (NULL, NULL, block);
10397 gsi_replace (gsi_p, bind, true);
10398 gimple_bind_add_stmt (bind, stmt);
10400 tbody = gimple_bind_body (bind);
10401 gimplify_and_add (lock, &tbody);
10402 gimple_bind_set_body (bind, tbody);
10404 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10405 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10406 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10407 gimple_omp_set_body (stmt, NULL);
10409 tbody = gimple_bind_body (bind);
10410 gimplify_and_add (unlock, &tbody);
10411 gimple_bind_set_body (bind, tbody);
10413 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10415 pop_gimplify_context (bind);
10416 gimple_bind_append_vars (bind, ctx->block_vars);
10417 BLOCK_VARS (block) = gimple_bind_vars (bind);
10420 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10421 for a lastprivate clause. Given a loop control predicate of (V
10422 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10423 is appended to *DLIST, iterator initialization is appended to
10424 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10425 to be emitted in a critical section. */
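/* For example, given 'for (V = N1; V < N2; V += STEP)', the emitted
   check is a sketch along the lines of

     if (V >= N2)   // or V == N2 when STEP is known to be +-1
       <lastprivate assignments>

   with V initialized before the loop so that threads executing no
   iterations cannot satisfy the predicate by accident.  */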
10427 static void
10428 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10429 gimple_seq *dlist, gimple_seq *clist,
10430 struct omp_context *ctx)
10432 tree clauses, cond, vinit;
10433 enum tree_code cond_code;
10434 gimple_seq stmts;
10436 cond_code = fd->loop.cond_code;
10437 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10439 /* When possible, use a strict equality expression. This can let VRP
10440 type optimizations deduce the value and remove a copy. */
10441 if (tree_fits_shwi_p (fd->loop.step))
10443 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10444 if (step == 1 || step == -1)
10445 cond_code = EQ_EXPR;
10448 tree n2 = fd->loop.n2;
10449 if (fd->collapse > 1
10450 && TREE_CODE (n2) != INTEGER_CST
10451 && gimple_omp_for_combined_into_p (fd->for_stmt))
10453 struct omp_context *taskreg_ctx = NULL;
10454 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10456 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10457 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10458 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10460 if (gimple_omp_for_combined_into_p (gfor))
10462 gcc_assert (ctx->outer->outer
10463 && is_parallel_ctx (ctx->outer->outer));
10464 taskreg_ctx = ctx->outer->outer;
10466 else
10468 struct omp_for_data outer_fd;
10469 omp_extract_for_data (gfor, &outer_fd, NULL);
10470 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10473 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10474 taskreg_ctx = ctx->outer->outer;
10476 else if (is_taskreg_ctx (ctx->outer))
10477 taskreg_ctx = ctx->outer;
10478 if (taskreg_ctx)
10480 int i;
10481 tree taskreg_clauses
10482 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10483 tree innerc = omp_find_clause (taskreg_clauses,
10484 OMP_CLAUSE__LOOPTEMP_);
10485 gcc_assert (innerc);
10486 int count = fd->collapse;
10487 if (fd->non_rect
10488 && fd->last_nonrect == fd->first_nonrect + 1)
10489 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10490 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10491 count += 4;
10492 for (i = 0; i < count; i++)
10494 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10495 OMP_CLAUSE__LOOPTEMP_);
10496 gcc_assert (innerc);
10498 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10499 OMP_CLAUSE__LOOPTEMP_);
10500 if (innerc)
10501 n2 = fold_convert (TREE_TYPE (n2),
10502 lookup_decl (OMP_CLAUSE_DECL (innerc),
10503 taskreg_ctx));
10506 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10508 clauses = gimple_omp_for_clauses (fd->for_stmt);
10509 stmts = NULL;
10510 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10511 if (!gimple_seq_empty_p (stmts))
10513 gimple_seq_add_seq (&stmts, *dlist);
10514 *dlist = stmts;
10516 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10517 vinit = fd->loop.n1;
10518 if (cond_code == EQ_EXPR
10519 && tree_fits_shwi_p (fd->loop.n2)
10520 && ! integer_zerop (fd->loop.n2))
10521 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10522 else
10523 vinit = unshare_expr (vinit);
10525 /* Initialize the iterator variable, so that threads that don't execute
10526 any iterations don't execute the lastprivate clauses by accident. */
10527 gimplify_assign (fd->loop.v, vinit, body_p);
10531 /* OpenACC privatization.
10533 Or, in other words, *sharing* at the respective OpenACC level of
10534 parallelism.
10536 From a correctness perspective, a non-addressable variable can't be accessed
10537 outside the current thread, so it can go in a (faster than shared memory)
10538 register -- though that register may need to be broadcast in some
10539 circumstances. A variable can only meaningfully be "shared" across workers
10540 or vector lanes if its address is taken, e.g. by a call to an atomic
10541 builtin.
10543 From an optimisation perspective, the answer might be fuzzier: maybe
10544 sometimes, using shared memory directly would be faster than
10545 broadcasting. */
10547 static void
10548 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10549 const location_t loc, const tree c,
10550 const tree decl)
10552 const dump_user_location_t d_u_loc
10553 = dump_user_location_t::from_location_t (loc);
10554 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10555 #if __GNUC__ >= 10
10556 # pragma GCC diagnostic push
10557 # pragma GCC diagnostic ignored "-Wformat"
10558 #endif
10559 dump_printf_loc (l_dump_flags, d_u_loc,
10560 "variable %<%T%> ", decl);
10561 #if __GNUC__ >= 10
10562 # pragma GCC diagnostic pop
10563 #endif
10564 if (c)
10565 dump_printf (l_dump_flags,
10566 "in %qs clause ",
10567 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10568 else
10569 dump_printf (l_dump_flags,
10570 "declared in block ");
10573 static bool
10574 oacc_privatization_candidate_p (const location_t loc, const tree c,
10575 const tree decl)
10577 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10579 /* There is some differentiation depending on block vs. clause. */
10580 bool block = !c;
10582 bool res = true;
10584 if (res && !VAR_P (decl))
10586 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10587 privatized into a new VAR_DECL. */
10588 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10590 res = false;
10592 if (dump_enabled_p ())
10594 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10595 dump_printf (l_dump_flags,
10596 "potentially has improper OpenACC privatization level: %qs\n",
10597 get_tree_code_name (TREE_CODE (decl)));
10601 if (res && block && TREE_STATIC (decl))
10603 res = false;
10605 if (dump_enabled_p ())
10607 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10608 dump_printf (l_dump_flags,
10609 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10610 "static");
10614 if (res && block && DECL_EXTERNAL (decl))
10616 res = false;
10618 if (dump_enabled_p ())
10620 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10621 dump_printf (l_dump_flags,
10622 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10623 "external");
10627 if (res && !TREE_ADDRESSABLE (decl))
10629 res = false;
10631 if (dump_enabled_p ())
10633 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10634 dump_printf (l_dump_flags,
10635 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10636 "not addressable");
10640 /* If an artificial variable has been added to a bind, e.g.
10641 a compiler-generated temporary structure used by the Fortran front-end, do
10642 not consider it as a privatization candidate. Note that variables on
10643 the stack are private per-thread by default: making them "gang-private"
10644 for OpenACC actually means to share a single instance of a variable
10645 amongst all workers and threads spawned within each gang.
10646 At present, no compiler-generated artificial variables require such
10647 sharing semantics, so this is safe. */
10649 if (res && block && DECL_ARTIFICIAL (decl))
10651 res = false;
10653 if (dump_enabled_p ())
10655 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10656 dump_printf (l_dump_flags,
10657 "isn%'t candidate for adjusting OpenACC privatization "
10658 "level: %s\n", "artificial");
10662 if (res)
10664 if (dump_enabled_p ())
10666 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10667 dump_printf (l_dump_flags,
10668 "is candidate for adjusting OpenACC privatization level\n");
10672 if (dump_file && (dump_flags & TDF_DETAILS))
10674 print_generic_decl (dump_file, decl, dump_flags);
10675 fprintf (dump_file, "\n");
10678 return res;
10681 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10682 CTX. */
10684 static void
10685 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10687 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10688 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10690 tree decl = OMP_CLAUSE_DECL (c);
10692 tree new_decl = lookup_decl (decl, ctx);
10694 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10695 new_decl))
10696 continue;
10698 gcc_checking_assert
10699 (!ctx->oacc_privatization_candidates.contains (new_decl));
10700 ctx->oacc_privatization_candidates.safe_push (new_decl);
10704 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10705 CTX. */
10707 static void
10708 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10710 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10712 tree new_decl = lookup_decl (decl, ctx);
10713 gcc_checking_assert (new_decl == decl);
10715 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10716 new_decl))
10717 continue;
10719 gcc_checking_assert
10720 (!ctx->oacc_privatization_candidates.contains (new_decl));
10721 ctx->oacc_privatization_candidates.safe_push (new_decl);
10725 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10727 static tree
10728 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10729 struct walk_stmt_info *wi)
10731 gimple *stmt = gsi_stmt (*gsi_p);
10733 *handled_ops_p = true;
10734 switch (gimple_code (stmt))
10736 WALK_SUBSTMTS;
10738 case GIMPLE_OMP_FOR:
10739 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10740 && gimple_omp_for_combined_into_p (stmt))
10741 *handled_ops_p = false;
10742 break;
10744 case GIMPLE_OMP_SCAN:
10745 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10746 return integer_zero_node;
10747 default:
10748 break;
10750 return NULL;
10753 /* Helper function for lower_omp_for, add transformations for a worksharing
10754 loop with scan directives inside of it.
10755 For a worksharing loop not combined with simd, transform:
10756 #pragma omp for reduction(inscan,+:r) private(i)
10757 for (i = 0; i < n; i = i + 1)
10760 update (r);
10762 #pragma omp scan inclusive(r)
10764 use (r);
10768 into two worksharing loops + code to merge results:
10770 num_threads = omp_get_num_threads ();
10771 thread_num = omp_get_thread_num ();
10772 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10773 <D.2099>:
10774 var2 = r;
10775 goto <D.2101>;
10776 <D.2100>:
10777 // For UDRs this is UDR init, or if ctors are needed, copy from
10778 // var3 that has been constructed to contain the neutral element.
10779 var2 = 0;
10780 <D.2101>:
10781 ivar = 0;
10782 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10783 // a shared array with num_threads elements, and rprivb to a local array
10784 // with a number of elements equal to the number of (contiguous) iterations
10785 // the current thread will perform.  The controlb and controlp variables
10786 // are temporaries used to handle deallocation of rprivb at the end of the
10787 // second GOMP_FOR.
10788 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10789 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10790 for (i = 0; i < n; i = i + 1)
10793 // For UDRs this is UDR init or copy from var3.
10794 r = 0;
10795 // This is the input phase from user code.
10796 update (r);
10799 // For UDRs this is UDR merge.
10800 var2 = var2 + r;
10801 // Rather than handing it over to the user, save to local thread's
10802 // array.
10803 rprivb[ivar] = var2;
10804 // For exclusive scan, the above two statements are swapped.
10805 ivar = ivar + 1;
10808 // And remember the final value from this thread in the shared
10809 // rpriva array.
10810 rpriva[(sizetype) thread_num] = var2;
10811 // If there is more than one thread, compute the inclusive parallel scan
10812 // of the rpriva array using a work-efficient prefix sum.
10813 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10814 <D.2102>:
10815 GOMP_barrier ();
10816 down = 0;
10817 k = 1;
10818 num_threadsu = (unsigned int) num_threads;
10819 thread_numup1 = (unsigned int) thread_num + 1;
10820 <D.2108>:
10821 twok = k << 1;
10822 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10823 <D.2110>:
10824 down = 4294967295;
10825 k = k >> 1;
10826 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10827 <D.2112>:
10828 k = k >> 1;
10829 <D.2111>:
10830 twok = k << 1;
10831 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10832 mul = REALPART_EXPR <cplx>;
10833 ovf = IMAGPART_EXPR <cplx>;
10834 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10835 <D.2116>:
10836 andv = k & down;
10837 andvm1 = andv + 4294967295;
10838 l = mul + andvm1;
10839 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10840 <D.2120>:
10841 // For UDRs this is UDR merge, performed using the var2 variable as a temporary,
10842 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10843 rpriva[l] = rpriva[l - k] + rpriva[l];
10844 <D.2117>:
10845 if (down == 0) goto <D.2121>; else goto <D.2122>;
10846 <D.2121>:
10847 k = k << 1;
10848 goto <D.2123>;
10849 <D.2122>:
10850 k = k >> 1;
10851 <D.2123>:
10852 GOMP_barrier ();
10853 if (k != 0) goto <D.2108>; else goto <D.2103>;
10854 <D.2103>:
10855 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10856 <D.2124>:
10857 // For UDRs this is UDR init or copy from var3.
10858 var2 = 0;
10859 goto <D.2126>;
10860 <D.2125>:
10861 var2 = rpriva[thread_num - 1];
10862 <D.2126>:
10863 ivar = 0;
10864 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10865 reduction(inscan,+:r) private(i)
10866 for (i = 0; i < n; i = i + 1)
10869 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10870 r = var2 + rprivb[ivar];
10873 // This is the scan phase from user code.
10874 use (r);
10875 // Plus a bump of the iterator.
10876 ivar = ivar + 1;
10878 } */
10880 static void
10881 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10882 struct omp_for_data *fd, omp_context *ctx)
10884 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10885 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10887 gimple_seq body = gimple_omp_body (stmt);
10888 gimple_stmt_iterator input1_gsi = gsi_none ();
10889 struct walk_stmt_info wi;
10890 memset (&wi, 0, sizeof (wi));
10891 wi.val_only = true;
10892 wi.info = (void *) &input1_gsi;
10893 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10894 gcc_assert (!gsi_end_p (input1_gsi));
10896 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10897 gimple_stmt_iterator gsi = input1_gsi;
10898 gsi_next (&gsi);
10899 gimple_stmt_iterator scan1_gsi = gsi;
10900 gimple *scan_stmt1 = gsi_stmt (gsi);
10901 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10903 gimple_seq input_body = gimple_omp_body (input_stmt1);
10904 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10905 gimple_omp_set_body (input_stmt1, NULL);
10906 gimple_omp_set_body (scan_stmt1, NULL);
10907 gimple_omp_set_body (stmt, NULL);
10909 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10910 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10911 gimple_omp_set_body (stmt, body);
10912 gimple_omp_set_body (input_stmt1, input_body);
10914 gimple_stmt_iterator input2_gsi = gsi_none ();
10915 memset (&wi, 0, sizeof (wi));
10916 wi.val_only = true;
10917 wi.info = (void *) &input2_gsi;
10918 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10919 gcc_assert (!gsi_end_p (input2_gsi));
10921 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10922 gsi = input2_gsi;
10923 gsi_next (&gsi);
10924 gimple_stmt_iterator scan2_gsi = gsi;
10925 gimple *scan_stmt2 = gsi_stmt (gsi);
10926 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10927 gimple_omp_set_body (scan_stmt2, scan_body);
10929 gimple_stmt_iterator input3_gsi = gsi_none ();
10930 gimple_stmt_iterator scan3_gsi = gsi_none ();
10931 gimple_stmt_iterator input4_gsi = gsi_none ();
10932 gimple_stmt_iterator scan4_gsi = gsi_none ();
10933 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10934 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10935 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10936 if (is_for_simd)
10938 memset (&wi, 0, sizeof (wi));
10939 wi.val_only = true;
10940 wi.info = (void *) &input3_gsi;
10941 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10942 gcc_assert (!gsi_end_p (input3_gsi));
10944 input_stmt3 = gsi_stmt (input3_gsi);
10945 gsi = input3_gsi;
10946 gsi_next (&gsi);
10947 scan3_gsi = gsi;
10948 scan_stmt3 = gsi_stmt (gsi);
10949 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10951 memset (&wi, 0, sizeof (wi));
10952 wi.val_only = true;
10953 wi.info = (void *) &input4_gsi;
10954 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10955 gcc_assert (!gsi_end_p (input4_gsi));
10957 input_stmt4 = gsi_stmt (input4_gsi);
10958 gsi = input4_gsi;
10959 gsi_next (&gsi);
10960 scan4_gsi = gsi;
10961 scan_stmt4 = gsi_stmt (gsi);
10962 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10964 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10965 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10968 tree num_threads = create_tmp_var (integer_type_node);
10969 tree thread_num = create_tmp_var (integer_type_node);
10970 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10971 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10972 gimple *g = gimple_build_call (nthreads_decl, 0);
10973 gimple_call_set_lhs (g, num_threads);
10974 gimple_seq_add_stmt (body_p, g);
10975 g = gimple_build_call (threadnum_decl, 0);
10976 gimple_call_set_lhs (g, thread_num);
10977 gimple_seq_add_stmt (body_p, g);
10979 tree ivar = create_tmp_var (sizetype);
10980 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10981 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10982 tree k = create_tmp_var (unsigned_type_node);
10983 tree l = create_tmp_var (unsigned_type_node);
10985 gimple_seq clist = NULL, mdlist = NULL;
10986 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10987 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10988 gimple_seq scan1_list = NULL, input2_list = NULL;
10989 gimple_seq last_list = NULL, reduc_list = NULL;
10990 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10991 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10992 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10994 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10995 tree var = OMP_CLAUSE_DECL (c);
10996 tree new_var = lookup_decl (var, ctx);
10997 tree var3 = NULL_TREE;
10998 tree new_vard = new_var;
10999 if (omp_privatize_by_reference (var))
11000 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
11001 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11003 var3 = maybe_lookup_decl (new_vard, ctx);
11004 if (var3 == new_vard)
11005 var3 = NULL_TREE;
11008 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11009 tree rpriva = create_tmp_var (ptype);
11010 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11011 OMP_CLAUSE_DECL (nc) = rpriva;
11012 *cp1 = nc;
11013 cp1 = &OMP_CLAUSE_CHAIN (nc);
11015 tree rprivb = create_tmp_var (ptype);
11016 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11017 OMP_CLAUSE_DECL (nc) = rprivb;
11018 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11019 *cp1 = nc;
11020 cp1 = &OMP_CLAUSE_CHAIN (nc);
11022 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11023 if (new_vard != new_var)
11024 TREE_ADDRESSABLE (var2) = 1;
11025 gimple_add_tmp_var (var2);
11027 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11028 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11029 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11030 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11031 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11033 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11034 thread_num, integer_minus_one_node);
11035 x = fold_convert_loc (clause_loc, sizetype, x);
11036 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11037 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11038 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11039 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11041 x = fold_convert_loc (clause_loc, sizetype, l);
11042 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11043 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11044 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11045 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11047 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11048 x = fold_convert_loc (clause_loc, sizetype, x);
11049 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11050 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11051 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11052 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11054 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11055 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11056 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11057 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11059 tree var4 = is_for_simd ? new_var : var2;
11060 tree var5 = NULL_TREE, var6 = NULL_TREE;
11061 if (is_for_simd)
11063 var5 = lookup_decl (var, input_simd_ctx);
11064 var6 = lookup_decl (var, scan_simd_ctx);
11065 if (new_vard != new_var)
11067 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11068 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11071 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11073 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11074 tree val = var2;
11076 x = lang_hooks.decls.omp_clause_default_ctor
11077 (c, var2, build_outer_var_ref (var, ctx));
11078 if (x)
11079 gimplify_and_add (x, &clist);
11081 x = build_outer_var_ref (var, ctx);
11082 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11084 gimplify_and_add (x, &thr01_list);
11086 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11087 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11088 if (var3)
11090 x = unshare_expr (var4);
11091 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11092 gimplify_and_add (x, &thrn1_list);
11093 x = unshare_expr (var4);
11094 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11095 gimplify_and_add (x, &thr02_list);
11097 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11099 /* Otherwise, assign to it the identity element. */
11100 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11101 tseq = copy_gimple_seq_and_replace_locals (tseq);
11102 if (!is_for_simd)
11104 if (new_vard != new_var)
11105 val = build_fold_addr_expr_loc (clause_loc, val);
11106 SET_DECL_VALUE_EXPR (new_vard, val);
11107 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11109 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11110 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11111 lower_omp (&tseq, ctx);
11112 gimple_seq_add_seq (&thrn1_list, tseq);
11113 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11114 lower_omp (&tseq, ctx);
11115 gimple_seq_add_seq (&thr02_list, tseq);
11116 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11117 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11118 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11119 if (y)
11120 SET_DECL_VALUE_EXPR (new_vard, y);
11121 else
11123 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11124 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11128 x = unshare_expr (var4);
11129 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11130 gimplify_and_add (x, &thrn2_list);
11132 if (is_for_simd)
11134 x = unshare_expr (rprivb_ref);
11135 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11136 gimplify_and_add (x, &scan1_list);
11138 else
11140 if (ctx->scan_exclusive)
11142 x = unshare_expr (rprivb_ref);
11143 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11144 gimplify_and_add (x, &scan1_list);
11147 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11148 tseq = copy_gimple_seq_and_replace_locals (tseq);
11149 SET_DECL_VALUE_EXPR (placeholder, var2);
11150 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11151 lower_omp (&tseq, ctx);
11152 gimple_seq_add_seq (&scan1_list, tseq);
11154 if (ctx->scan_inclusive)
11156 x = unshare_expr (rprivb_ref);
11157 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11158 gimplify_and_add (x, &scan1_list);
11162 x = unshare_expr (rpriva_ref);
11163 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11164 unshare_expr (var4));
11165 gimplify_and_add (x, &mdlist);
11167 x = unshare_expr (is_for_simd ? var6 : new_var);
11168 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11169 gimplify_and_add (x, &input2_list);
11171 val = rprivb_ref;
11172 if (new_vard != new_var)
11173 val = build_fold_addr_expr_loc (clause_loc, val);
11175 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11176 tseq = copy_gimple_seq_and_replace_locals (tseq);
11177 SET_DECL_VALUE_EXPR (new_vard, val);
11178 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11179 if (is_for_simd)
11181 SET_DECL_VALUE_EXPR (placeholder, var6);
11182 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11184 else
11185 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11186 lower_omp (&tseq, ctx);
11187 if (y)
11188 SET_DECL_VALUE_EXPR (new_vard, y);
11189 else
11191 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11192 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11194 if (!is_for_simd)
11196 SET_DECL_VALUE_EXPR (placeholder, new_var);
11197 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11198 lower_omp (&tseq, ctx);
11200 gimple_seq_add_seq (&input2_list, tseq);
11202 x = build_outer_var_ref (var, ctx);
11203 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11204 gimplify_and_add (x, &last_list);
11206 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11207 gimplify_and_add (x, &reduc_list);
11208 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11209 tseq = copy_gimple_seq_and_replace_locals (tseq);
11210 val = rprival_ref;
11211 if (new_vard != new_var)
11212 val = build_fold_addr_expr_loc (clause_loc, val);
11213 SET_DECL_VALUE_EXPR (new_vard, val);
11214 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11215 SET_DECL_VALUE_EXPR (placeholder, var2);
11216 lower_omp (&tseq, ctx);
11217 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11218 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11219 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11220 if (y)
11221 SET_DECL_VALUE_EXPR (new_vard, y);
11222 else
11224 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11225 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11227 gimple_seq_add_seq (&reduc_list, tseq);
11228 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11229 gimplify_and_add (x, &reduc_list);
11231 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11232 if (x)
11233 gimplify_and_add (x, dlist);
11235 else
11237 x = build_outer_var_ref (var, ctx);
11238 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11240 x = omp_reduction_init (c, TREE_TYPE (new_var));
11241 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11242 &thrn1_list);
11243 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11245 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11247 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11248 if (code == MINUS_EXPR)
11249 code = PLUS_EXPR;
11251 if (is_for_simd)
11252 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11253 else
11255 if (ctx->scan_exclusive)
11256 gimplify_assign (unshare_expr (rprivb_ref), var2,
11257 &scan1_list);
11258 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11259 gimplify_assign (var2, x, &scan1_list);
11260 if (ctx->scan_inclusive)
11261 gimplify_assign (unshare_expr (rprivb_ref), var2,
11262 &scan1_list);
11265 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11266 &mdlist);
11268 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11269 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11271 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11272 &last_list);
11274 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11275 unshare_expr (rprival_ref));
11276 gimplify_assign (rprival_ref, x, &reduc_list);
11280 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11281 gimple_seq_add_stmt (&scan1_list, g);
11282 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11283 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11284 ? scan_stmt4 : scan_stmt2), g);
11286 tree controlb = create_tmp_var (boolean_type_node);
11287 tree controlp = create_tmp_var (ptr_type_node);
11288 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11289 OMP_CLAUSE_DECL (nc) = controlb;
11290 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11291 *cp1 = nc;
11292 cp1 = &OMP_CLAUSE_CHAIN (nc);
11293 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11294 OMP_CLAUSE_DECL (nc) = controlp;
11295 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11296 *cp1 = nc;
11297 cp1 = &OMP_CLAUSE_CHAIN (nc);
11298 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11299 OMP_CLAUSE_DECL (nc) = controlb;
11300 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11301 *cp2 = nc;
11302 cp2 = &OMP_CLAUSE_CHAIN (nc);
11303 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11304 OMP_CLAUSE_DECL (nc) = controlp;
11305 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11306 *cp2 = nc;
11307 cp2 = &OMP_CLAUSE_CHAIN (nc);
11309 *cp1 = gimple_omp_for_clauses (stmt);
11310 gimple_omp_for_set_clauses (stmt, new_clauses1);
11311 *cp2 = gimple_omp_for_clauses (new_stmt);
11312 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11314 if (is_for_simd)
11316 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11317 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11319 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11320 GSI_SAME_STMT);
11321 gsi_remove (&input3_gsi, true);
11322 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11323 GSI_SAME_STMT);
11324 gsi_remove (&scan3_gsi, true);
11325 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11326 GSI_SAME_STMT);
11327 gsi_remove (&input4_gsi, true);
11328 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11329 GSI_SAME_STMT);
11330 gsi_remove (&scan4_gsi, true);
11332 else
11334 gimple_omp_set_body (scan_stmt1, scan1_list);
11335 gimple_omp_set_body (input_stmt2, input2_list);
11338 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11339 GSI_SAME_STMT);
11340 gsi_remove (&input1_gsi, true);
11341 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11342 GSI_SAME_STMT);
11343 gsi_remove (&scan1_gsi, true);
11344 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11345 GSI_SAME_STMT);
11346 gsi_remove (&input2_gsi, true);
11347 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11348 GSI_SAME_STMT);
11349 gsi_remove (&scan2_gsi, true);
11351 gimple_seq_add_seq (body_p, clist);
11353 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11354 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11355 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11356 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11357 gimple_seq_add_stmt (body_p, g);
11358 g = gimple_build_label (lab1);
11359 gimple_seq_add_stmt (body_p, g);
11360 gimple_seq_add_seq (body_p, thr01_list);
11361 g = gimple_build_goto (lab3);
11362 gimple_seq_add_stmt (body_p, g);
11363 g = gimple_build_label (lab2);
11364 gimple_seq_add_stmt (body_p, g);
11365 gimple_seq_add_seq (body_p, thrn1_list);
11366 g = gimple_build_label (lab3);
11367 gimple_seq_add_stmt (body_p, g);
11369 g = gimple_build_assign (ivar, size_zero_node);
11370 gimple_seq_add_stmt (body_p, g);
11372 gimple_seq_add_stmt (body_p, stmt);
11373 gimple_seq_add_seq (body_p, body);
11374 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11375 fd->loop.v));
11377 g = gimple_build_omp_return (true);
11378 gimple_seq_add_stmt (body_p, g);
11379 gimple_seq_add_seq (body_p, mdlist);
11381 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11382 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11383 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11384 gimple_seq_add_stmt (body_p, g);
11385 g = gimple_build_label (lab1);
11386 gimple_seq_add_stmt (body_p, g);
11388 g = omp_build_barrier (NULL);
11389 gimple_seq_add_stmt (body_p, g);
11391 tree down = create_tmp_var (unsigned_type_node);
11392 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11393 gimple_seq_add_stmt (body_p, g);
11395 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11396 gimple_seq_add_stmt (body_p, g);
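/* What follows emits the cross-thread combination as a butterfly-style
   sweep over rpriva[] (a sketch of the indexing; the function comment
   above shows the same loop in pseudo-code).  K doubles on the way up
   (DOWN == 0) and halves on the way down (DOWN == ~0U).  On each level,
   thread TID merges rpriva[L - K] into rpriva[L], where the K & DOWN
   term below yields L = (TID + 1) * 2K - 1 going up and
   L = (TID + 1) * 2K + K - 1 going down; lanes where the multiplication
   overflows or L >= num_threads are skipped.  E.g. with 4 threads the
   up-sweep updates rpriva[1] and rpriva[3] (K = 1), then rpriva[3]
   (K = 2), and the down-sweep fills in rpriva[2] (K = 1), leaving
   inclusive per-thread prefixes in rpriva[].  */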
11398 tree num_threadsu = create_tmp_var (unsigned_type_node);
11399 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11400 gimple_seq_add_stmt (body_p, g);
11402 tree thread_numu = create_tmp_var (unsigned_type_node);
11403 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11404 gimple_seq_add_stmt (body_p, g);
11406 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11407 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11408 build_int_cst (unsigned_type_node, 1));
11409 gimple_seq_add_stmt (body_p, g);
11411 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11412 g = gimple_build_label (lab3);
11413 gimple_seq_add_stmt (body_p, g);
11415 tree twok = create_tmp_var (unsigned_type_node);
11416 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11417 gimple_seq_add_stmt (body_p, g);
11419 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11420 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11421 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11422 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11423 gimple_seq_add_stmt (body_p, g);
11424 g = gimple_build_label (lab4);
11425 gimple_seq_add_stmt (body_p, g);
11426 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11427 gimple_seq_add_stmt (body_p, g);
11428 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11429 gimple_seq_add_stmt (body_p, g);
11431 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11432 gimple_seq_add_stmt (body_p, g);
11433 g = gimple_build_label (lab6);
11434 gimple_seq_add_stmt (body_p, g);
11436 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11437 gimple_seq_add_stmt (body_p, g);
11439 g = gimple_build_label (lab5);
11440 gimple_seq_add_stmt (body_p, g);
11442 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11443 gimple_seq_add_stmt (body_p, g);
11445 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11446 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11447 gimple_call_set_lhs (g, cplx);
11448 gimple_seq_add_stmt (body_p, g);
11449 tree mul = create_tmp_var (unsigned_type_node);
11450 g = gimple_build_assign (mul, REALPART_EXPR,
11451 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11452 gimple_seq_add_stmt (body_p, g);
11453 tree ovf = create_tmp_var (unsigned_type_node);
11454 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11455 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11456 gimple_seq_add_stmt (body_p, g);
11458 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11459 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11460 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11461 lab7, lab8);
11462 gimple_seq_add_stmt (body_p, g);
11463 g = gimple_build_label (lab7);
11464 gimple_seq_add_stmt (body_p, g);
11466 tree andv = create_tmp_var (unsigned_type_node);
11467 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11468 gimple_seq_add_stmt (body_p, g);
11469 tree andvm1 = create_tmp_var (unsigned_type_node);
11470 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11471 build_minus_one_cst (unsigned_type_node));
11472 gimple_seq_add_stmt (body_p, g);
11474 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11475 gimple_seq_add_stmt (body_p, g);
11477 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11478 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11479 gimple_seq_add_stmt (body_p, g);
11480 g = gimple_build_label (lab9);
11481 gimple_seq_add_stmt (body_p, g);
11482 gimple_seq_add_seq (body_p, reduc_list);
11483 g = gimple_build_label (lab8);
11484 gimple_seq_add_stmt (body_p, g);
11486 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11487 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11488 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11489 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11490 lab10, lab11);
11491 gimple_seq_add_stmt (body_p, g);
11492 g = gimple_build_label (lab10);
11493 gimple_seq_add_stmt (body_p, g);
11494 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11495 gimple_seq_add_stmt (body_p, g);
11496 g = gimple_build_goto (lab12);
11497 gimple_seq_add_stmt (body_p, g);
11498 g = gimple_build_label (lab11);
11499 gimple_seq_add_stmt (body_p, g);
11500 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11501 gimple_seq_add_stmt (body_p, g);
11502 g = gimple_build_label (lab12);
11503 gimple_seq_add_stmt (body_p, g);
11505 g = omp_build_barrier (NULL);
11506 gimple_seq_add_stmt (body_p, g);
11508 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11509 lab3, lab2);
11510 gimple_seq_add_stmt (body_p, g);
11512 g = gimple_build_label (lab2);
11513 gimple_seq_add_stmt (body_p, g);
11515 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11516 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11517 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11518 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11519 gimple_seq_add_stmt (body_p, g);
11520 g = gimple_build_label (lab1);
11521 gimple_seq_add_stmt (body_p, g);
11522 gimple_seq_add_seq (body_p, thr02_list);
11523 g = gimple_build_goto (lab3);
11524 gimple_seq_add_stmt (body_p, g);
11525 g = gimple_build_label (lab2);
11526 gimple_seq_add_stmt (body_p, g);
11527 gimple_seq_add_seq (body_p, thrn2_list);
11528 g = gimple_build_label (lab3);
11529 gimple_seq_add_stmt (body_p, g);
11531 g = gimple_build_assign (ivar, size_zero_node);
11532 gimple_seq_add_stmt (body_p, g);
11533 gimple_seq_add_stmt (body_p, new_stmt);
11534 gimple_seq_add_seq (body_p, new_body);
11536 gimple_seq new_dlist = NULL;
11537 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11538 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11539 tree num_threadsm1 = create_tmp_var (integer_type_node);
11540 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11541 integer_minus_one_node);
11542 gimple_seq_add_stmt (&new_dlist, g);
11543 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11544 gimple_seq_add_stmt (&new_dlist, g);
11545 g = gimple_build_label (lab1);
11546 gimple_seq_add_stmt (&new_dlist, g);
11547 gimple_seq_add_seq (&new_dlist, last_list);
11548 g = gimple_build_label (lab2);
11549 gimple_seq_add_stmt (&new_dlist, g);
11550 gimple_seq_add_seq (&new_dlist, *dlist);
11551 *dlist = new_dlist;
11554 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11555 the addresses of variables to be made private at the surrounding
11556 parallelism level. Such functions appear in the gimple code stream in two
11557 forms, e.g. for a partitioned loop:
11559 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11560 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11561 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11562 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11564 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11565 not as part of a HEAD_MARK sequence:
11567 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11569 For such stand-alone appearances, the 3rd argument is always 0, denoting
11570 gang partitioning. */
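/* For instance (hypothetical user-level source), privatizing W in a
   partitioned loop:

     #pragma acc parallel loop private (w)
     for (i = 0; i < n; i++)
       w = f (i);

   records W among the privatization candidates, and the marker built
   below carries &w so that the later OpenACC device lowering can place
   the variable in storage private to the chosen parallelism level.  */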
11572 static gcall *
11573 lower_oacc_private_marker (omp_context *ctx)
11575 if (ctx->oacc_privatization_candidates.length () == 0)
11576 return NULL;
11578 auto_vec<tree, 5> args;
11580 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11581 args.quick_push (integer_zero_node);
11582 args.quick_push (integer_minus_one_node);
11584 int i;
11585 tree decl;
11586 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11588 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11589 tree addr = build_fold_addr_expr (decl);
11590 args.safe_push (addr);
11593 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11596 /* Lower code for an OMP loop directive. */
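/* A sketch of the overall shape produced for a plain (non-scan)
   worksharing loop:

     <input clause setup: firstprivate/lastprivate/reduction init>
     #pragma omp for ...                // the retained GIMPLE_OMP_FOR
       <loop body>
     GIMPLE_OMP_CONTINUE (V, V)
     <reduction merges, lastprivate copy-out>
     GIMPLE_OMP_RETURN (nowait_p)

   all wrapped in a new GIMPLE_BIND that replaces the statement at GSI_P.  */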
11598 static void
11599 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11601 tree *rhs_p, block;
11602 struct omp_for_data fd, *fdp = NULL;
11603 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11604 gbind *new_stmt;
11605 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11606 gimple_seq cnt_list = NULL, clist = NULL;
11607 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11608 size_t i;
11610 push_gimplify_context ();
11612 if (is_gimple_omp_oacc (ctx->stmt))
11613 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11615 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11617 block = make_node (BLOCK);
11618 new_stmt = gimple_build_bind (NULL, NULL, block);
11619 /* Replace it at gsi right away, so that 'stmt' is no longer a member
11620 of a sequence, as we're going to add it to a different
11621 one below. */
11622 gsi_replace (gsi_p, new_stmt, true);
11624 /* Move the declarations of temporaries out of the loop body before we
11625 make it go away. */
11626 omp_for_body = gimple_omp_body (stmt);
11627 if (!gimple_seq_empty_p (omp_for_body)
11628 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11630 gbind *inner_bind
11631 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11632 tree vars = gimple_bind_vars (inner_bind);
11633 if (is_gimple_omp_oacc (ctx->stmt))
11634 oacc_privatization_scan_decl_chain (ctx, vars);
11635 gimple_bind_append_vars (new_stmt, vars);
11636 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
11637 keep them on the inner_bind and its block. */
11638 gimple_bind_set_vars (inner_bind, NULL_TREE);
11639 if (gimple_bind_block (inner_bind))
11640 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11643 if (gimple_omp_for_combined_into_p (stmt))
11645 omp_extract_for_data (stmt, &fd, NULL);
11646 fdp = &fd;
11648 /* We need two temporaries with fd.loop.v type (istart/iend)
11649 and then (fd.collapse - 1) temporaries with the same
11650 type for count2 ... countN-1 vars if not constant. */
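/* E.g. a combined collapse(3) loop whose end bound is not constant needs
   2 + (3 - 1) = 4 such temporaries.  */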
11651 size_t count = 2;
11652 tree type = fd.iter_type;
11653 if (fd.collapse > 1
11654 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11655 count += fd.collapse - 1;
11656 size_t count2 = 0;
11657 tree type2 = NULL_TREE;
11658 bool taskreg_for
11659 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11660 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11661 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11662 tree simtc = NULL;
11663 tree clauses = *pc;
11664 if (fd.collapse > 1
11665 && fd.non_rect
11666 && fd.last_nonrect == fd.first_nonrect + 1
11667 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11668 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11669 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11671 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11672 type2 = TREE_TYPE (v);
11673 count++;
11674 count2 = 3;
11676 if (taskreg_for)
11677 outerc
11678 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11679 OMP_CLAUSE__LOOPTEMP_);
11680 if (ctx->simt_stmt)
11681 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11682 OMP_CLAUSE__LOOPTEMP_);
11683 for (i = 0; i < count + count2; i++)
11685 tree temp;
11686 if (taskreg_for)
11688 gcc_assert (outerc);
11689 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11690 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11691 OMP_CLAUSE__LOOPTEMP_);
11693 else
11695 /* If there are 2 adjacent SIMD stmts, one with _simt_
11696 clause, another without, make sure they have the same
11697 decls in _looptemp_ clauses, because the outer stmt
11698 they are combined into will look up just one inner_stmt. */
11699 if (ctx->simt_stmt)
11700 temp = OMP_CLAUSE_DECL (simtc);
11701 else
11702 temp = create_tmp_var (i >= count ? type2 : type);
11703 insert_decl_map (&ctx->outer->cb, temp, temp);
11705 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11706 OMP_CLAUSE_DECL (*pc) = temp;
11707 pc = &OMP_CLAUSE_CHAIN (*pc);
11708 if (ctx->simt_stmt)
11709 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11710 OMP_CLAUSE__LOOPTEMP_);
11712 *pc = clauses;
11715 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11716 dlist = NULL;
11717 body = NULL;
11718 tree rclauses
11719 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11720 OMP_CLAUSE_REDUCTION);
11721 tree rtmp = NULL_TREE;
11722 if (rclauses)
11724 tree type = build_pointer_type (pointer_sized_int_node);
11725 tree temp = create_tmp_var (type);
11726 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11727 OMP_CLAUSE_DECL (c) = temp;
11728 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11729 gimple_omp_for_set_clauses (stmt, c);
11730 lower_omp_task_reductions (ctx, OMP_FOR,
11731 gimple_omp_for_clauses (stmt),
11732 &tred_ilist, &tred_dlist);
11733 rclauses = c;
11734 rtmp = make_ssa_name (type);
11735 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11738 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11739 ctx);
11741 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11742 fdp);
11743 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11744 gimple_omp_for_pre_body (stmt));
11746 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11748 gcall *private_marker = NULL;
11749 if (is_gimple_omp_oacc (ctx->stmt)
11750 && !gimple_seq_empty_p (omp_for_body))
11751 private_marker = lower_oacc_private_marker (ctx);
11753 /* Lower the header expressions. At this point, we can assume that
11754 the header is of the form:
11756 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11758 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11759 using the .omp_data_s mapping, if needed. */
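/* E.g. (a sketch) if the loop bound N is shared from an enclosing
   parallel, *rhs_p may have been remapped to .omp_data_i->n, which is
   not a GIMPLE invariant; it is then evaluated once into a fresh
   temporary appended to cnt_list, and the loop header uses the
   temporary instead.  */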
11760 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11762 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11763 if (TREE_CODE (*rhs_p) == TREE_VEC)
11765 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11766 TREE_VEC_ELT (*rhs_p, 1)
11767 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11768 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11769 TREE_VEC_ELT (*rhs_p, 2)
11770 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11772 else if (!is_gimple_min_invariant (*rhs_p))
11773 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11774 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11775 recompute_tree_invariant_for_addr_expr (*rhs_p);
11777 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11778 if (TREE_CODE (*rhs_p) == TREE_VEC)
11780 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11781 TREE_VEC_ELT (*rhs_p, 1)
11782 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11783 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11784 TREE_VEC_ELT (*rhs_p, 2)
11785 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11787 else if (!is_gimple_min_invariant (*rhs_p))
11788 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11789 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11790 recompute_tree_invariant_for_addr_expr (*rhs_p);
11792 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11793 if (!is_gimple_min_invariant (*rhs_p))
11794 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11796 if (rclauses)
11797 gimple_seq_add_seq (&tred_ilist, cnt_list);
11798 else
11799 gimple_seq_add_seq (&body, cnt_list);
11801 /* Once lowered, extract the bounds and clauses. */
11802 omp_extract_for_data (stmt, &fd, NULL);
11804 if (is_gimple_omp_oacc (ctx->stmt)
11805 && !ctx_in_oacc_kernels_region (ctx))
11806 lower_oacc_head_tail (gimple_location (stmt),
11807 gimple_omp_for_clauses (stmt), private_marker,
11808 &oacc_head, &oacc_tail, ctx);
11810 /* Add OpenACC partitioning and reduction markers just before the loop. */
11811 if (oacc_head)
11812 gimple_seq_add_seq (&body, oacc_head);
11814 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11816 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11817 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11819 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11821 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11822 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11823 OMP_CLAUSE_LINEAR_STEP (c)
11824 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11825 ctx);
11828 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11829 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11830 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11831 else
11833 gimple_seq_add_stmt (&body, stmt);
11834 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11837 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11838 fd.loop.v));
11840 /* After the loop, add exit clauses. */
11841 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11843 if (clist)
11845 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11846 gcall *g = gimple_build_call (fndecl, 0);
11847 gimple_seq_add_stmt (&body, g);
11848 gimple_seq_add_seq (&body, clist);
11849 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11850 g = gimple_build_call (fndecl, 0);
11851 gimple_seq_add_stmt (&body, g);
11854 if (ctx->cancellable)
11855 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11857 gimple_seq_add_seq (&body, dlist);
11859 if (rclauses)
11861 gimple_seq_add_seq (&tred_ilist, body);
11862 body = tred_ilist;
11865 body = maybe_catch_exception (body);
11867 /* Region exit marker goes at the end of the loop body. */
11868 gimple *g = gimple_build_omp_return (fd.have_nowait);
11869 gimple_seq_add_stmt (&body, g);
11871 gimple_seq_add_seq (&body, tred_dlist);
11873 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11875 if (rclauses)
11876 OMP_CLAUSE_DECL (rclauses) = rtmp;
11878 /* Add OpenACC joining and reduction markers just after the loop. */
11879 if (oacc_tail)
11880 gimple_seq_add_seq (&body, oacc_tail);
11882 pop_gimplify_context (new_stmt);
11884 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11885 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11886 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11887 if (BLOCK_VARS (block))
11888 TREE_USED (block) = 1;
11890 gimple_bind_set_body (new_stmt, body);
11891 gimple_omp_set_body (stmt, NULL);
11892 gimple_omp_for_set_pre_body (stmt, NULL);
11895 /* Callback for walk_stmts. Check if the current statement only contains
11896 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
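/* I.e. it recognizes the combined form (a hypothetical example)

     #pragma omp parallel
     #pragma omp for
     for (...) ...

   where the parallel body consists of exactly one worksharing construct
   (ignoring debug stmts); the caller then flags the parallel as combined
   so that expansion can use the combined runtime entry points.  */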
11898 static tree
11899 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11900 bool *handled_ops_p,
11901 struct walk_stmt_info *wi)
11903 int *info = (int *) wi->info;
11904 gimple *stmt = gsi_stmt (*gsi_p);
11906 *handled_ops_p = true;
11907 switch (gimple_code (stmt))
11909 WALK_SUBSTMTS;
11911 case GIMPLE_DEBUG:
11912 break;
11913 case GIMPLE_OMP_FOR:
11914 case GIMPLE_OMP_SECTIONS:
11915 *info = *info == 0 ? 1 : -1;
11916 break;
11917 default:
11918 *info = -1;
11919 break;
11921 return NULL;
11924 struct omp_taskcopy_context
11926 /* This field must be at the beginning, as we do "inheritance": Some
11927 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11928 receive a copy_body_data pointer that is up-casted to an
11929 omp_context pointer. */
11930 copy_body_data cb;
11931 omp_context *ctx;
11934 static tree
11935 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11937 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11939 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11940 return create_tmp_var (TREE_TYPE (var));
11942 return var;
11945 static tree
11946 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11948 tree name, new_fields = NULL, type, f;
11950 type = lang_hooks.types.make_type (RECORD_TYPE);
11951 name = DECL_NAME (TYPE_NAME (orig_type));
11952 name = build_decl (gimple_location (tcctx->ctx->stmt),
11953 TYPE_DECL, name, type);
11954 TYPE_NAME (type) = name;
11956 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11958 tree new_f = copy_node (f);
11959 DECL_CONTEXT (new_f) = type;
11960 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11961 TREE_CHAIN (new_f) = new_fields;
11962 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11963 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11964 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11965 &tcctx->cb, NULL);
11966 new_fields = new_f;
11967 tcctx->cb.decl_map->put (f, new_f);
11969 TYPE_FIELDS (type) = nreverse (new_fields);
11970 layout_type (type);
11971 return type;
11974 /* Create the task copyfn, i.e. the function GOMP_task invokes to copy-construct the task's firstprivate data from the sender record into the task's own data block. */
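/* A sketch of the result for a hypothetical task

     int n = ...;  int vla[n];  int x;
     #pragma omp task firstprivate (x, vla) shared (s)

   With ARG pointing at the task's data block and SARG at the sender's,
   the copyfn does roughly:

     arg->x = sarg->x;                  // plain firstprivate: copy ctor
     arg->s = sarg->s;                  // shared: copy the pointer
     <copy the VLA body into arg's variably sized field>   // last pass
     arg->vla_ptr = &arg->vla_data;

   where vla_ptr/vla_data name the remapped fields for illustration only.  */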
11976 static void
11977 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11979 struct function *child_cfun;
11980 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11981 tree record_type, srecord_type, bind, list;
11982 bool record_needs_remap = false, srecord_needs_remap = false;
11983 splay_tree_node n;
11984 struct omp_taskcopy_context tcctx;
11985 location_t loc = gimple_location (task_stmt);
11986 size_t looptempno = 0;
11988 child_fn = gimple_omp_task_copy_fn (task_stmt);
11989 task_cpyfns.safe_push (task_stmt);
11990 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11991 gcc_assert (child_cfun->cfg == NULL);
11992 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11994 /* Reset DECL_CONTEXT on function arguments. */
11995 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11996 DECL_CONTEXT (t) = child_fn;
11998 /* Populate the function. */
11999 push_gimplify_context ();
12000 push_cfun (child_cfun);
12002 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12003 TREE_SIDE_EFFECTS (bind) = 1;
12004 list = NULL;
12005 DECL_SAVED_TREE (child_fn) = bind;
12006 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
12008 /* Remap src and dst argument types if needed. */
12009 record_type = ctx->record_type;
12010 srecord_type = ctx->srecord_type;
12011 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
12012 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12014 record_needs_remap = true;
12015 break;
12017 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
12018 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12020 srecord_needs_remap = true;
12021 break;
12024 if (record_needs_remap || srecord_needs_remap)
12026 memset (&tcctx, '\0', sizeof (tcctx));
12027 tcctx.cb.src_fn = ctx->cb.src_fn;
12028 tcctx.cb.dst_fn = child_fn;
12029 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12030 gcc_checking_assert (tcctx.cb.src_node);
12031 tcctx.cb.dst_node = tcctx.cb.src_node;
12032 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12033 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12034 tcctx.cb.eh_lp_nr = 0;
12035 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12036 tcctx.cb.decl_map = new hash_map<tree, tree>;
12037 tcctx.ctx = ctx;
12039 if (record_needs_remap)
12040 record_type = task_copyfn_remap_type (&tcctx, record_type);
12041 if (srecord_needs_remap)
12042 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12044 else
12045 tcctx.cb.decl_map = NULL;
12047 arg = DECL_ARGUMENTS (child_fn);
12048 TREE_TYPE (arg) = build_pointer_type (record_type);
12049 sarg = DECL_CHAIN (arg);
12050 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12052 /* First pass: initialize temporaries used in record_type and srecord_type
12053 sizes and field offsets. */
12054 if (tcctx.cb.decl_map)
12055 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12056 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12058 tree *p;
12060 decl = OMP_CLAUSE_DECL (c);
12061 p = tcctx.cb.decl_map->get (decl);
12062 if (p == NULL)
12063 continue;
12064 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12065 sf = (tree) n->value;
12066 sf = *tcctx.cb.decl_map->get (sf);
12067 src = build_simple_mem_ref_loc (loc, sarg);
12068 src = omp_build_component_ref (src, sf);
12069 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12070 append_to_statement_list (t, &list);
12073 /* Second pass: copy shared var pointers and copy construct non-VLA
12074 firstprivate vars. */
12075 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12076 switch (OMP_CLAUSE_CODE (c))
12078 splay_tree_key key;
12079 case OMP_CLAUSE_SHARED:
12080 decl = OMP_CLAUSE_DECL (c);
12081 key = (splay_tree_key) decl;
12082 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12083 key = (splay_tree_key) &DECL_UID (decl);
12084 n = splay_tree_lookup (ctx->field_map, key);
12085 if (n == NULL)
12086 break;
12087 f = (tree) n->value;
12088 if (tcctx.cb.decl_map)
12089 f = *tcctx.cb.decl_map->get (f);
12090 n = splay_tree_lookup (ctx->sfield_map, key);
12091 sf = (tree) n->value;
12092 if (tcctx.cb.decl_map)
12093 sf = *tcctx.cb.decl_map->get (sf);
12094 src = build_simple_mem_ref_loc (loc, sarg);
12095 src = omp_build_component_ref (src, sf);
12096 dst = build_simple_mem_ref_loc (loc, arg);
12097 dst = omp_build_component_ref (dst, f);
12098 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12099 append_to_statement_list (t, &list);
12100 break;
12101 case OMP_CLAUSE_REDUCTION:
12102 case OMP_CLAUSE_IN_REDUCTION:
12103 decl = OMP_CLAUSE_DECL (c);
12104 if (TREE_CODE (decl) == MEM_REF)
12106 decl = TREE_OPERAND (decl, 0);
12107 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12108 decl = TREE_OPERAND (decl, 0);
12109 if (TREE_CODE (decl) == INDIRECT_REF
12110 || TREE_CODE (decl) == ADDR_EXPR)
12111 decl = TREE_OPERAND (decl, 0);
12113 key = (splay_tree_key) decl;
12114 n = splay_tree_lookup (ctx->field_map, key);
12115 if (n == NULL)
12116 break;
12117 f = (tree) n->value;
12118 if (tcctx.cb.decl_map)
12119 f = *tcctx.cb.decl_map->get (f);
12120 n = splay_tree_lookup (ctx->sfield_map, key);
12121 sf = (tree) n->value;
12122 if (tcctx.cb.decl_map)
12123 sf = *tcctx.cb.decl_map->get (sf);
12124 src = build_simple_mem_ref_loc (loc, sarg);
12125 src = omp_build_component_ref (src, sf);
12126 if (decl != OMP_CLAUSE_DECL (c)
12127 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12128 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12129 src = build_simple_mem_ref_loc (loc, src);
12130 dst = build_simple_mem_ref_loc (loc, arg);
12131 dst = omp_build_component_ref (dst, f);
12132 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12133 append_to_statement_list (t, &list);
12134 break;
12135 case OMP_CLAUSE__LOOPTEMP_:
12136 /* Fields for the first two _looptemp_ clauses are initialized by
12137 GOMP_taskloop*; the rest are handled like firstprivate. */
12138 if (looptempno < 2)
12140 looptempno++;
12141 break;
12143 /* FALLTHRU */
12144 case OMP_CLAUSE__REDUCTEMP_:
12145 case OMP_CLAUSE_FIRSTPRIVATE:
12146 decl = OMP_CLAUSE_DECL (c);
12147 if (is_variable_sized (decl))
12148 break;
12149 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12150 if (n == NULL)
12151 break;
12152 f = (tree) n->value;
12153 if (tcctx.cb.decl_map)
12154 f = *tcctx.cb.decl_map->get (f);
12155 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12156 if (n != NULL)
12158 sf = (tree) n->value;
12159 if (tcctx.cb.decl_map)
12160 sf = *tcctx.cb.decl_map->get (sf);
12161 src = build_simple_mem_ref_loc (loc, sarg);
12162 src = omp_build_component_ref (src, sf);
12163 if (use_pointer_for_field (decl, NULL)
12164 || omp_privatize_by_reference (decl))
12165 src = build_simple_mem_ref_loc (loc, src);
12167 else
12168 src = decl;
12169 dst = build_simple_mem_ref_loc (loc, arg);
12170 dst = omp_build_component_ref (dst, f);
12171 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12172 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12173 else
12175 if (ctx->allocate_map)
12176 if (tree *allocatorp = ctx->allocate_map->get (decl))
12178 tree allocator = *allocatorp;
12179 HOST_WIDE_INT ialign = 0;
12180 if (TREE_CODE (allocator) == TREE_LIST)
12182 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12183 allocator = TREE_PURPOSE (allocator);
12185 if (TREE_CODE (allocator) != INTEGER_CST)
12187 n = splay_tree_lookup (ctx->sfield_map,
12188 (splay_tree_key) allocator);
12189 allocator = (tree) n->value;
12190 if (tcctx.cb.decl_map)
12191 allocator = *tcctx.cb.decl_map->get (allocator);
12192 tree a = build_simple_mem_ref_loc (loc, sarg);
12193 allocator = omp_build_component_ref (a, allocator);
12195 allocator = fold_convert (pointer_sized_int_node, allocator);
12196 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12197 tree align = build_int_cst (size_type_node,
12198 MAX (ialign,
12199 DECL_ALIGN_UNIT (decl)));
12200 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12201 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12202 allocator);
12203 ptr = fold_convert (TREE_TYPE (dst), ptr);
12204 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12205 append_to_statement_list (t, &list);
12206 dst = build_simple_mem_ref_loc (loc, dst);
12208 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12210 append_to_statement_list (t, &list);
12211 break;
12212 case OMP_CLAUSE_PRIVATE:
12213 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12214 break;
12215 decl = OMP_CLAUSE_DECL (c);
12216 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12217 f = (tree) n->value;
12218 if (tcctx.cb.decl_map)
12219 f = *tcctx.cb.decl_map->get (f);
12220 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12221 if (n != NULL)
12223 sf = (tree) n->value;
12224 if (tcctx.cb.decl_map)
12225 sf = *tcctx.cb.decl_map->get (sf);
12226 src = build_simple_mem_ref_loc (loc, sarg);
12227 src = omp_build_component_ref (src, sf);
12228 if (use_pointer_for_field (decl, NULL))
12229 src = build_simple_mem_ref_loc (loc, src);
12231 else
12232 src = decl;
12233 dst = build_simple_mem_ref_loc (loc, arg);
12234 dst = omp_build_component_ref (dst, f);
12235 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12236 append_to_statement_list (t, &list);
12237 break;
12238 default:
12239 break;
12242 /* Last pass: handle VLA firstprivates. */
12243 if (tcctx.cb.decl_map)
12244 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12245 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12247 tree ind, ptr, df;
12249 decl = OMP_CLAUSE_DECL (c);
12250 if (!is_variable_sized (decl))
12251 continue;
12252 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12253 if (n == NULL)
12254 continue;
12255 f = (tree) n->value;
12256 f = *tcctx.cb.decl_map->get (f);
12257 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12258 ind = DECL_VALUE_EXPR (decl);
12259 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12260 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12261 n = splay_tree_lookup (ctx->sfield_map,
12262 (splay_tree_key) TREE_OPERAND (ind, 0));
12263 sf = (tree) n->value;
12264 sf = *tcctx.cb.decl_map->get (sf);
12265 src = build_simple_mem_ref_loc (loc, sarg);
12266 src = omp_build_component_ref (src, sf);
12267 src = build_simple_mem_ref_loc (loc, src);
12268 dst = build_simple_mem_ref_loc (loc, arg);
12269 dst = omp_build_component_ref (dst, f);
12270 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12271 append_to_statement_list (t, &list);
12272 n = splay_tree_lookup (ctx->field_map,
12273 (splay_tree_key) TREE_OPERAND (ind, 0));
12274 df = (tree) n->value;
12275 df = *tcctx.cb.decl_map->get (df);
12276 ptr = build_simple_mem_ref_loc (loc, arg);
12277 ptr = omp_build_component_ref (ptr, df);
12278 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12279 build_fold_addr_expr_loc (loc, dst));
12280 append_to_statement_list (t, &list);
12283 t = build1 (RETURN_EXPR, void_type_node, NULL);
12284 append_to_statement_list (t, &list);
12286 if (tcctx.cb.decl_map)
12287 delete tcctx.cb.decl_map;
12288 pop_gimplify_context (NULL);
12289 BIND_EXPR_BODY (bind) = list;
12290 pop_cfun ();
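/* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the flat array of
   counts and addresses that the libgomp task interface expects.  The
   statements initializing the array are appended to *ISEQ, a clobber of
   the array to *OSEQ, and a single OMP_CLAUSE_DEPEND_LAST clause pointing
   at the array is prepended to *PCLAUSES.  */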
12293 static void
12294 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12296 tree c, clauses;
12297 gimple *g;
12298 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12300 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12301 gcc_assert (clauses);
12302 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12303 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12304 switch (OMP_CLAUSE_DEPEND_KIND (c))
12306 case OMP_CLAUSE_DEPEND_LAST:
12307 /* Lowering already done at gimplification. */
12308 return;
12309 case OMP_CLAUSE_DEPEND_IN:
12310 cnt[2]++;
12311 break;
12312 case OMP_CLAUSE_DEPEND_OUT:
12313 case OMP_CLAUSE_DEPEND_INOUT:
12314 cnt[0]++;
12315 break;
12316 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12317 cnt[1]++;
12318 break;
12319 case OMP_CLAUSE_DEPEND_DEPOBJ:
12320 cnt[3]++;
12321 break;
12322 case OMP_CLAUSE_DEPEND_INOUTSET:
12323 cnt[4]++;
12324 break;
12325 default:
12326 gcc_unreachable ();
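/* The array layout (a sketch; libgomp's task handling is the consumer)
   is either the legacy form
     { total, n_out_inout, addr, addr, ... }
   or, when mutexinoutset, depobj or inoutset dependencies are present,
   the extended form
     { 0, total, n_out_inout, n_mutexinoutset, n_in, addr, ... }
   with each inoutset entry taking two extra trailing slots.  */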
12328 if (cnt[1] || cnt[3] || cnt[4])
12329 idx = 5;
12330 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12331 size_t inoutidx = total + idx;
12332 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12333 tree array = create_tmp_var (type);
12334 TREE_ADDRESSABLE (array) = 1;
12335 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12336 NULL_TREE);
12337 if (idx == 5)
12339 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12340 gimple_seq_add_stmt (iseq, g);
12341 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12342 NULL_TREE);
12344 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12345 gimple_seq_add_stmt (iseq, g);
12346 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12348 r = build4 (ARRAY_REF, ptr_type_node, array,
12349 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12350 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12351 gimple_seq_add_stmt (iseq, g);
12353 for (i = 0; i < 5; i++)
12355 if (cnt[i] == 0)
12356 continue;
12357 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12358 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12359 continue;
12360 else
12362 switch (OMP_CLAUSE_DEPEND_KIND (c))
12364 case OMP_CLAUSE_DEPEND_IN:
12365 if (i != 2)
12366 continue;
12367 break;
12368 case OMP_CLAUSE_DEPEND_OUT:
12369 case OMP_CLAUSE_DEPEND_INOUT:
12370 if (i != 0)
12371 continue;
12372 break;
12373 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12374 if (i != 1)
12375 continue;
12376 break;
12377 case OMP_CLAUSE_DEPEND_DEPOBJ:
12378 if (i != 3)
12379 continue;
12380 break;
12381 case OMP_CLAUSE_DEPEND_INOUTSET:
12382 if (i != 4)
12383 continue;
12384 break;
12385 default:
12386 gcc_unreachable ();
12388 tree t = OMP_CLAUSE_DECL (c);
12389 if (i == 4)
12391 t = build4 (ARRAY_REF, ptr_type_node, array,
12392 size_int (inoutidx), NULL_TREE, NULL_TREE);
12393 t = build_fold_addr_expr (t);
12394 inoutidx += 2;
12396 t = fold_convert (ptr_type_node, t);
12397 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12398 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12399 NULL_TREE, NULL_TREE);
12400 g = gimple_build_assign (r, t);
12401 gimple_seq_add_stmt (iseq, g);
12404 if (cnt[4])
12405 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12406 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12407 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12409 tree t = OMP_CLAUSE_DECL (c);
12410 t = fold_convert (ptr_type_node, t);
12411 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12412 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12413 NULL_TREE, NULL_TREE);
12414 g = gimple_build_assign (r, t);
12415 gimple_seq_add_stmt (iseq, g);
12416 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12417 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12418 NULL_TREE, NULL_TREE);
12419 g = gimple_build_assign (r, t);
12420 gimple_seq_add_stmt (iseq, g);
12423 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12424 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12425 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12426 OMP_CLAUSE_CHAIN (c) = *pclauses;
12427 *pclauses = c;
12428 tree clobber = build_clobber (type);
12429 g = gimple_build_assign (array, clobber);
12430 gimple_seq_add_stmt (oseq, g);
12433 /* Lower the OpenMP parallel or task directive in the current statement
12434 in GSI_P. CTX holds context information for the directive. */
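/* E.g. (a sketch) for

     #pragma omp parallel shared (a) firstprivate (b)
       body;

   the statement is kept but wrapped so that the encountering thread
   fills in the sender struct first:

     .omp_data_o.a = &a;
     .omp_data_o.b = b;
     #pragma omp parallel [data arg: .omp_data_o]
       {
         .omp_data_i = &.omp_data_o;
         <body with references to A and B remapped through .omp_data_i>
       }
     .omp_data_o = {CLOBBER};  */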
12436 static void
12437 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12439 tree clauses;
12440 tree child_fn, t;
12441 gimple *stmt = gsi_stmt (*gsi_p);
12442 gbind *par_bind, *bind, *dep_bind = NULL;
12443 gimple_seq par_body;
12444 location_t loc = gimple_location (stmt);
12446 clauses = gimple_omp_taskreg_clauses (stmt);
12447 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12448 && gimple_omp_task_taskwait_p (stmt))
12450 par_bind = NULL;
12451 par_body = NULL;
12453 else
12455 par_bind
12456 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12457 par_body = gimple_bind_body (par_bind);
12459 child_fn = ctx->cb.dst_fn;
12460 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12461 && !gimple_omp_parallel_combined_p (stmt))
12463 struct walk_stmt_info wi;
12464 int ws_num = 0;
12466 memset (&wi, 0, sizeof (wi));
12467 wi.info = &ws_num;
12468 wi.val_only = true;
12469 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12470 if (ws_num == 1)
12471 gimple_omp_parallel_set_combined_p (stmt, true);
12473 gimple_seq dep_ilist = NULL;
12474 gimple_seq dep_olist = NULL;
12475 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12476 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12478 push_gimplify_context ();
12479 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12480 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12481 &dep_ilist, &dep_olist);
12484 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12485 && gimple_omp_task_taskwait_p (stmt))
12487 if (dep_bind)
12489 gsi_replace (gsi_p, dep_bind, true);
12490 gimple_bind_add_seq (dep_bind, dep_ilist);
12491 gimple_bind_add_stmt (dep_bind, stmt);
12492 gimple_bind_add_seq (dep_bind, dep_olist);
12493 pop_gimplify_context (dep_bind);
12495 return;
12498 if (ctx->srecord_type)
12499 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12501 gimple_seq tskred_ilist = NULL;
12502 gimple_seq tskred_olist = NULL;
12503 if ((is_task_ctx (ctx)
12504 && gimple_omp_task_taskloop_p (ctx->stmt)
12505 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12506 OMP_CLAUSE_REDUCTION))
12507 || (is_parallel_ctx (ctx)
12508 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12509 OMP_CLAUSE__REDUCTEMP_)))
12511 if (dep_bind == NULL)
12513 push_gimplify_context ();
12514 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12516 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12517 : OMP_PARALLEL,
12518 gimple_omp_taskreg_clauses (ctx->stmt),
12519 &tskred_ilist, &tskred_olist);
12522 push_gimplify_context ();
12524 gimple_seq par_olist = NULL;
12525 gimple_seq par_ilist = NULL;
12526 gimple_seq par_rlist = NULL;
12527 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12528 lower_omp (&par_body, ctx);
12529 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12530 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12532 /* Declare all the variables created by mapping and the variables
12533 declared in the scope of the parallel body. */
12534 record_vars_into (ctx->block_vars, child_fn);
12535 maybe_remove_omp_member_access_dummy_vars (par_bind);
12536 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12538 if (ctx->record_type)
12540 ctx->sender_decl
12541 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12542 : ctx->record_type, ".omp_data_o");
12543 DECL_NAMELESS (ctx->sender_decl) = 1;
12544 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12545 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12548 gimple_seq olist = NULL;
12549 gimple_seq ilist = NULL;
12550 lower_send_clauses (clauses, &ilist, &olist, ctx);
12551 lower_send_shared_vars (&ilist, &olist, ctx);
12553 if (ctx->record_type)
12555 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12556 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12557 clobber));
12560 /* Once all the expansions are done, sequence all the different
12561 fragments inside gimple_omp_body. */
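	/* A rough sketch of the layout assembled below (illustrative, not
	   normative):

	     .omp_data_i = &.omp_data_o;   receiver setup, if ctx->record_type
	     <par_ilist>                   privatization / firstprivate setup
	     <par_body>                    the lowered region body
	     <par_rlist>                   reduction merge code
	   cancel_label:                   only if ctx->cancellable
	     <par_olist>                   copy-back / cleanup code
	     GIMPLE_OMP_CONTINUE           tasks only
	     GIMPLE_OMP_RETURN  */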
12563 gimple_seq new_body = NULL;
12565 if (ctx->record_type)
12567 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12568 /* fixup_child_record_type might have changed receiver_decl's type. */
12569 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12570 gimple_seq_add_stmt (&new_body,
12571 gimple_build_assign (ctx->receiver_decl, t));
12574 gimple_seq_add_seq (&new_body, par_ilist);
12575 gimple_seq_add_seq (&new_body, par_body);
12576 gimple_seq_add_seq (&new_body, par_rlist);
12577 if (ctx->cancellable)
12578 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12579 gimple_seq_add_seq (&new_body, par_olist);
12580 new_body = maybe_catch_exception (new_body);
12581 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12582 gimple_seq_add_stmt (&new_body,
12583 gimple_build_omp_continue (integer_zero_node,
12584 integer_zero_node));
12585 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12586 gimple_omp_set_body (stmt, new_body);
12588 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12589 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12590 else
12591 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12592 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12593 gimple_bind_add_seq (bind, ilist);
12594 gimple_bind_add_stmt (bind, stmt);
12595 gimple_bind_add_seq (bind, olist);
12597 pop_gimplify_context (NULL);
12599 if (dep_bind)
12601 gimple_bind_add_seq (dep_bind, dep_ilist);
12602 gimple_bind_add_seq (dep_bind, tskred_ilist);
12603 gimple_bind_add_stmt (dep_bind, bind);
12604 gimple_bind_add_seq (dep_bind, tskred_olist);
12605 gimple_bind_add_seq (dep_bind, dep_olist);
12606 pop_gimplify_context (dep_bind);
12610 /* Lower the GIMPLE_OMP_TARGET in the current statement
12611 in GSI_P. CTX holds context information for the directive. */
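/* A sketch for orientation (illustrative, not normative): for, say,

     #pragma omp target map(tofrom: a[0:n])

   this function builds a send buffer .omp_data_arr plus parallel
   .omp_data_sizes and .omp_data_kinds arrays with one slot per mapped
   object; the expansion pass later hands those three arrays to the
   libgomp entry point (GOMP_target_ext for offloaded regions).  */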
12613 static void
12614 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12616 tree clauses;
12617 tree child_fn, t, c;
12618 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12619 gbind *tgt_bind, *bind, *dep_bind = NULL;
12620 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12621 location_t loc = gimple_location (stmt);
12622 bool offloaded, data_region;
12623 unsigned int map_cnt = 0;
12624 tree in_reduction_clauses = NULL_TREE;
12626 tree deep_map_cnt = NULL_TREE;
12627 tree deep_map_data = NULL_TREE;
12628 tree deep_map_offset_data = NULL_TREE;
12629 tree deep_map_offset = NULL_TREE;
12631 offloaded = is_gimple_omp_offloaded (stmt);
12632 switch (gimple_omp_target_kind (stmt))
12634 case GF_OMP_TARGET_KIND_REGION:
12635 tree *p, *q;
12636 q = &in_reduction_clauses;
12637 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12638 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12640 *q = *p;
12641 q = &OMP_CLAUSE_CHAIN (*q);
12642 *p = OMP_CLAUSE_CHAIN (*p);
12644 else
12645 p = &OMP_CLAUSE_CHAIN (*p);
12646 *q = NULL_TREE;
12647 *p = in_reduction_clauses;
12648 /* FALLTHRU */
12649 case GF_OMP_TARGET_KIND_UPDATE:
12650 case GF_OMP_TARGET_KIND_ENTER_DATA:
12651 case GF_OMP_TARGET_KIND_EXIT_DATA:
12652 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12653 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12654 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12655 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12656 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12657 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12658 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12659 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12660 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12661 data_region = false;
12662 break;
12663 case GF_OMP_TARGET_KIND_DATA:
12664 case GF_OMP_TARGET_KIND_OACC_DATA:
12665 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12666 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12667 data_region = true;
12668 break;
12669 default:
12670 gcc_unreachable ();
12673 /* Ensure that the 'requires' map is written via output_offload_tables, even if only
12674 'target (enter/exit) data' is used in the translation unit. */
12675 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12676 g->have_offload = true;
12678 clauses = gimple_omp_target_clauses (stmt);
12680 gimple_seq dep_ilist = NULL;
12681 gimple_seq dep_olist = NULL;
12682 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12683 if (has_depend || in_reduction_clauses)
12685 push_gimplify_context ();
12686 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12687 if (has_depend)
12688 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12689 &dep_ilist, &dep_olist);
12690 if (in_reduction_clauses)
12691 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12692 ctx, NULL);
12695 tgt_bind = NULL;
12696 tgt_body = NULL;
12697 if (offloaded)
12699 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12700 tgt_body = gimple_bind_body (tgt_bind);
12702 else if (data_region)
12703 tgt_body = gimple_omp_body (stmt);
12704 child_fn = ctx->cb.dst_fn;
12706 push_gimplify_context ();
12707 fplist = NULL;
12709 ilist = NULL;
12710 olist = NULL;
12711 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12712 switch (OMP_CLAUSE_CODE (c))
12714 tree var, x;
12716 default:
12717 break;
12718 case OMP_CLAUSE_MAP:
12719 #if CHECKING_P
12720 /* First check what we're prepared to handle in the following. */
12721 switch (OMP_CLAUSE_MAP_KIND (c))
12723 case GOMP_MAP_ALLOC:
12724 case GOMP_MAP_TO:
12725 case GOMP_MAP_FROM:
12726 case GOMP_MAP_TOFROM:
12727 case GOMP_MAP_POINTER:
12728 case GOMP_MAP_TO_PSET:
12729 case GOMP_MAP_DELETE:
12730 case GOMP_MAP_RELEASE:
12731 case GOMP_MAP_ALWAYS_TO:
12732 case GOMP_MAP_ALWAYS_FROM:
12733 case GOMP_MAP_ALWAYS_TOFROM:
12734 case GOMP_MAP_FORCE_PRESENT:
12735 case GOMP_MAP_ALWAYS_PRESENT_FROM:
12736 case GOMP_MAP_ALWAYS_PRESENT_TO:
12737 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
12739 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12740 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12741 case GOMP_MAP_STRUCT:
12742 case GOMP_MAP_STRUCT_UNORD:
12743 case GOMP_MAP_ALWAYS_POINTER:
12744 case GOMP_MAP_ATTACH:
12745 case GOMP_MAP_DETACH:
12746 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12747 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12748 break;
12749 case GOMP_MAP_IF_PRESENT:
12750 case GOMP_MAP_FORCE_ALLOC:
12751 case GOMP_MAP_FORCE_TO:
12752 case GOMP_MAP_FORCE_FROM:
12753 case GOMP_MAP_FORCE_TOFROM:
12754 case GOMP_MAP_FORCE_DEVICEPTR:
12755 case GOMP_MAP_DEVICE_RESIDENT:
12756 case GOMP_MAP_LINK:
12757 case GOMP_MAP_FORCE_DETACH:
12758 gcc_assert (is_gimple_omp_oacc (stmt));
12759 break;
12760 default:
12761 gcc_unreachable ();
12763 #endif
12764 /* FALLTHRU */
12765 case OMP_CLAUSE_TO:
12766 case OMP_CLAUSE_FROM:
12767 oacc_firstprivate:
12768 var = OMP_CLAUSE_DECL (c);
12770 tree extra = lang_hooks.decls.omp_deep_mapping_cnt (stmt, c, &ilist);
12771 if (extra != NULL_TREE && deep_map_cnt != NULL_TREE)
12772 deep_map_cnt = fold_build2_loc (OMP_CLAUSE_LOCATION (c), PLUS_EXPR,
12773 size_type_node, deep_map_cnt,
12774 extra);
12775 else if (extra != NULL_TREE)
12776 deep_map_cnt = extra;
12779 if (!DECL_P (var))
12781 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12782 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12783 && (OMP_CLAUSE_MAP_KIND (c)
12784 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12785 map_cnt++;
12786 continue;
12789 if (DECL_SIZE (var)
12790 && !poly_int_tree_p (DECL_SIZE (var)))
12792 tree var2 = DECL_VALUE_EXPR (var);
12793 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12794 var2 = TREE_OPERAND (var2, 0);
12795 gcc_assert (DECL_P (var2));
12796 var = var2;
12799 if (offloaded
12800 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12801 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12802 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12804 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12806 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12807 && varpool_node::get_create (var)->offloadable)
12808 continue;
12810 tree type = build_pointer_type (TREE_TYPE (var));
12811 tree new_var = lookup_decl (var, ctx);
12812 x = create_tmp_var_raw (type, get_name (new_var));
12813 gimple_add_tmp_var (x);
12814 x = build_simple_mem_ref (x);
12815 SET_DECL_VALUE_EXPR (new_var, x);
12816 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12818 continue;
12821 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12822 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12823 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12824 && is_omp_target (stmt))
12826 gcc_assert (maybe_lookup_field (c, ctx));
12827 map_cnt++;
12828 continue;
12831 if (!maybe_lookup_field (var, ctx))
12832 continue;
12834 /* Don't remap compute constructs' reduction variables, because the
12835 intermediate result must be local to each gang. */
12836 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12837 && is_gimple_omp_oacc (ctx->stmt)
12838 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12840 x = build_receiver_ref (var, true, ctx);
12841 tree new_var = lookup_decl (var, ctx);
12843 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12844 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12845 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12846 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12847 x = build_simple_mem_ref (x);
12848 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12850 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12851 if (omp_privatize_by_reference (new_var)
12852 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12853 || DECL_BY_REFERENCE (var)))
12855 /* Create a local object to hold the instance
12856 value. */
12857 tree type = TREE_TYPE (TREE_TYPE (new_var));
12858 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12859 tree inst = create_tmp_var (type, id);
12860 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12861 x = build_fold_addr_expr (inst);
12863 gimplify_assign (new_var, x, &fplist);
12865 else if (DECL_P (new_var))
12867 SET_DECL_VALUE_EXPR (new_var, x);
12868 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12870 else
12871 gcc_unreachable ();
12873 map_cnt++;
12874 break;
12876 case OMP_CLAUSE_FIRSTPRIVATE:
12877 omp_firstprivate_recv:
12878 gcc_checking_assert (offloaded);
12879 if (is_gimple_omp_oacc (ctx->stmt))
12881 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12882 gcc_checking_assert (!is_oacc_kernels (ctx));
12883 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12884 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12886 goto oacc_firstprivate;
12888 map_cnt++;
12889 var = OMP_CLAUSE_DECL (c);
12890 if (!omp_privatize_by_reference (var)
12891 && !is_gimple_reg_type (TREE_TYPE (var)))
12893 tree new_var = lookup_decl (var, ctx);
12894 if (is_variable_sized (var))
12896 tree pvar = DECL_VALUE_EXPR (var);
12897 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12898 pvar = TREE_OPERAND (pvar, 0);
12899 gcc_assert (DECL_P (pvar));
12900 tree new_pvar = lookup_decl (pvar, ctx);
12901 x = build_fold_indirect_ref (new_pvar);
12902 TREE_THIS_NOTRAP (x) = 1;
12904 else
12905 x = build_receiver_ref (var, true, ctx);
12906 SET_DECL_VALUE_EXPR (new_var, x);
12907 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12909 /* Fortran array descriptors: firstprivate of data + attach. */
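	  /* I.e. two extra map slots per descriptor: a GOMP_MAP_FIRSTPRIVATE
	     entry for the descriptor's data pointer plus a GOMP_MAP_ATTACH
	     entry binding the device copy of the descriptor to it; the
	     matching sender-side code is in the map-filling loop below.  */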
12910 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12911 && lang_hooks.decls.omp_array_data (var, true))
12912 map_cnt += 2;
12913 break;
12915 case OMP_CLAUSE_PRIVATE:
12916 gcc_checking_assert (offloaded);
12917 if (is_gimple_omp_oacc (ctx->stmt))
12919 /* No 'private' clauses on OpenACC 'kernels'. */
12920 gcc_checking_assert (!is_oacc_kernels (ctx));
12921 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12922 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12924 break;
12926 var = OMP_CLAUSE_DECL (c);
12927 if (is_variable_sized (var))
12929 tree new_var = lookup_decl (var, ctx);
12930 tree pvar = DECL_VALUE_EXPR (var);
12931 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12932 pvar = TREE_OPERAND (pvar, 0);
12933 gcc_assert (DECL_P (pvar));
12934 tree new_pvar = lookup_decl (pvar, ctx);
12935 x = build_fold_indirect_ref (new_pvar);
12936 TREE_THIS_NOTRAP (x) = 1;
12937 SET_DECL_VALUE_EXPR (new_var, x);
12938 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12940 break;
12942 case OMP_CLAUSE_USE_DEVICE_PTR:
12943 case OMP_CLAUSE_USE_DEVICE_ADDR:
12944 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12945 case OMP_CLAUSE_IS_DEVICE_PTR:
12946 var = OMP_CLAUSE_DECL (c);
12947 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12949 while (TREE_CODE (var) == INDIRECT_REF
12950 || TREE_CODE (var) == ARRAY_REF)
12951 var = TREE_OPERAND (var, 0);
12952 if (lang_hooks.decls.omp_array_data (var, true))
12953 goto omp_firstprivate_recv;
12955 map_cnt++;
12956 if (is_variable_sized (var))
12958 tree new_var = lookup_decl (var, ctx);
12959 tree pvar = DECL_VALUE_EXPR (var);
12960 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12961 pvar = TREE_OPERAND (pvar, 0);
12962 gcc_assert (DECL_P (pvar));
12963 tree new_pvar = lookup_decl (pvar, ctx);
12964 x = build_fold_indirect_ref (new_pvar);
12965 TREE_THIS_NOTRAP (x) = 1;
12966 SET_DECL_VALUE_EXPR (new_var, x);
12967 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12969 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12970 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12971 && !omp_privatize_by_reference (var)
12972 && !omp_is_allocatable_or_ptr (var)
12973 && !lang_hooks.decls.omp_array_data (var, true))
12974 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12976 tree new_var = lookup_decl (var, ctx);
12977 tree type = build_pointer_type (TREE_TYPE (var));
12978 x = create_tmp_var_raw (type, get_name (new_var));
12979 gimple_add_tmp_var (x);
12980 x = build_simple_mem_ref (x);
12981 SET_DECL_VALUE_EXPR (new_var, x);
12982 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12984 else
12986 tree new_var = lookup_decl (var, ctx);
12987 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12988 gimple_add_tmp_var (x);
12989 SET_DECL_VALUE_EXPR (new_var, x);
12990 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12992 break;
12995 if (offloaded)
12997 target_nesting_level++;
12998 lower_omp (&tgt_body, ctx);
12999 target_nesting_level--;
13001 else if (data_region)
13002 lower_omp (&tgt_body, ctx);
13004 if (offloaded)
13006 /* Declare all the variables created by mapping and the variables
13007 declared in the scope of the target body. */
13008 record_vars_into (ctx->block_vars, child_fn);
13009 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
13010 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
13013 if (ctx->record_type)
13015 if (deep_map_cnt && TREE_CODE (deep_map_cnt) == INTEGER_CST)
13016 /* map_cnt = map_cnt + tree_to_hwi (deep_map_cnt); */
13017 /* deep_map_cnt = NULL_TREE; */
13018 gcc_unreachable ();
13019 else if (deep_map_cnt)
13021 gcc_assert (flexible_array_type_p (ctx->record_type));
13022 tree n = create_tmp_var_raw (size_type_node, "nn_map");
13023 gimple_add_tmp_var (n);
13024 gimplify_assign (n, deep_map_cnt, &ilist);
13025 deep_map_cnt = n;
13027 ctx->sender_decl
13028 = create_tmp_var (deep_map_cnt ? build_pointer_type (ctx->record_type)
13029 : ctx->record_type, ".omp_data_arr");
13030 DECL_NAMELESS (ctx->sender_decl) = 1;
13031 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
13032 t = make_tree_vec (deep_map_cnt ? 4 : 3);
13033 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
13034 TREE_VEC_ELT (t, 1)
13035 = create_tmp_var (deep_map_cnt
13036 ? build_pointer_type (size_type_node)
13037 : build_array_type_nelts (size_type_node, map_cnt),
13038 ".omp_data_sizes");
13039 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
13040 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
13041 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
13042 tree tkind_type = short_unsigned_type_node;
13043 int talign_shift = 8;
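      /* Worked example of the encoding used below: each .omp_data_kinds
	 element packs the GOMP_MAP_* kind into the low talign_shift bits
	 and ceil_log2 of the alignment above them, so an 8-byte-aligned
	 tofrom mapping is encoded as (3 << 8) | GOMP_MAP_TOFROM.  */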
13044 TREE_VEC_ELT (t, 2)
13045 = create_tmp_var (deep_map_cnt
13046 ? build_pointer_type (tkind_type)
13047 : build_array_type_nelts (tkind_type, map_cnt),
13048 ".omp_data_kinds");
13049 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
13050 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
13051 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
13052 gimple_omp_target_set_data_arg (stmt, t);
13054 if (deep_map_cnt)
13056 tree tmp, size;
13057 size = create_tmp_var (size_type_node, NULL);
13058 DECL_NAMELESS (size) = 1;
13059 gimplify_assign (size,
13060 fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
13061 size_type_node, deep_map_cnt,
13062 build_int_cst (size_type_node,
13063 map_cnt)), &ilist);
13064 TREE_VEC_ELT (t, 3) = size;
13066 tree call = builtin_decl_explicit (BUILT_IN_MALLOC);
13067 size = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR,
13068 size_type_node, deep_map_cnt,
13069 TYPE_SIZE_UNIT (ptr_type_node));
13070 size = fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
13071 size_type_node, size,
13072 TYPE_SIZE_UNIT (ctx->record_type));
13073 tmp = build_call_expr_loc (input_location, call, 1, size);
13074 gimplify_assign (ctx->sender_decl, tmp, &ilist);
13076 size = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR,
13077 size_type_node, TREE_VEC_ELT (t, 3),
13078 TYPE_SIZE_UNIT (size_type_node));
13079 tmp = build_call_expr_loc (input_location, call, 1, size);
13080 gimplify_assign (TREE_VEC_ELT (t, 1), tmp, &ilist);
13082 size = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR,
13083 size_type_node, TREE_VEC_ELT (t, 3),
13084 TYPE_SIZE_UNIT (tkind_type));
13085 tmp = build_call_expr_loc (input_location, call, 1, size);
13086 gimplify_assign (TREE_VEC_ELT (t, 2), tmp, &ilist);
13087 tree field = TYPE_FIELDS (TREE_TYPE (TREE_TYPE (ctx->sender_decl)));
13088 for ( ; DECL_CHAIN (field) != NULL_TREE; field = DECL_CHAIN (field))
13089 ;
13090 gcc_assert (TREE_CODE (TREE_TYPE (field)));
13091 tmp = build_fold_indirect_ref (ctx->sender_decl);
13092 deep_map_data = omp_build_component_ref (tmp, field);
13093 deep_map_offset_data = create_tmp_var_raw (size_type_node,
13094 "map_offset_data");
13095 deep_map_offset = create_tmp_var_raw (size_type_node, "map_offset");
13096 gimple_add_tmp_var (deep_map_offset_data);
13097 gimple_add_tmp_var (deep_map_offset);
13098 gimplify_assign (deep_map_offset_data, build_int_cst (size_type_node,
13099 0), &ilist);
13100 gimplify_assign (deep_map_offset, build_int_cst (size_type_node,
13101 map_cnt), &ilist);
13104 vec<constructor_elt, va_gc> *vsize;
13105 vec<constructor_elt, va_gc> *vkind;
13106 vec_alloc (vsize, map_cnt);
13107 vec_alloc (vkind, map_cnt);
13108 unsigned int map_idx = 0;
13110 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13111 switch (OMP_CLAUSE_CODE (c))
13113 tree ovar, nc, s, purpose, var, x, type;
13114 unsigned int talign;
13116 default:
13117 break;
13119 case OMP_CLAUSE_MAP:
13120 case OMP_CLAUSE_TO:
13121 case OMP_CLAUSE_FROM:
13122 oacc_firstprivate_map:
13123 nc = c;
13124 ovar = OMP_CLAUSE_DECL (c);
13125 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13126 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13127 || (OMP_CLAUSE_MAP_KIND (c)
13128 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13129 break;
13130 if (deep_map_cnt)
13132 unsigned HOST_WIDE_INT tkind2;
13133 switch (OMP_CLAUSE_CODE (c))
13135 case OMP_CLAUSE_MAP: tkind2 = OMP_CLAUSE_MAP_KIND (c); break;
13136 case OMP_CLAUSE_FIRSTPRIVATE: tkind2 = GOMP_MAP_TO; break;
13137 case OMP_CLAUSE_TO: tkind2 = GOMP_MAP_TO; break;
13138 case OMP_CLAUSE_FROM: tkind2 = GOMP_MAP_FROM; break;
13139 default: gcc_unreachable ();
13141 lang_hooks.decls.omp_deep_mapping (stmt, c, tkind2,
13142 deep_map_data,
13143 TREE_VEC_ELT (t, 1),
13144 TREE_VEC_ELT (t, 2),
13145 deep_map_offset_data,
13146 deep_map_offset, &ilist);
13148 if (!DECL_P (ovar))
13150 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13151 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13153 nc = OMP_CLAUSE_CHAIN (c);
13154 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13155 == get_base_address (ovar));
13156 ovar = OMP_CLAUSE_DECL (nc);
13158 else
13160 tree x = build_sender_ref (ovar, ctx);
13161 tree v = ovar;
13162 if (in_reduction_clauses
13163 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13164 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13166 v = unshare_expr (v);
13167 tree *p = &v;
13168 while (handled_component_p (*p)
13169 || TREE_CODE (*p) == INDIRECT_REF
13170 || TREE_CODE (*p) == ADDR_EXPR
13171 || TREE_CODE (*p) == MEM_REF
13172 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13173 p = &TREE_OPERAND (*p, 0);
13174 tree d = *p;
13175 if (is_variable_sized (d))
13177 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13178 d = DECL_VALUE_EXPR (d);
13179 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13180 d = TREE_OPERAND (d, 0);
13181 gcc_assert (DECL_P (d));
13183 splay_tree_key key
13184 = (splay_tree_key) &DECL_CONTEXT (d);
13185 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13186 key)->value;
13187 if (d == *p)
13188 *p = nd;
13189 else
13190 *p = build_fold_indirect_ref (nd);
13192 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13193 gimplify_assign (x, v, &ilist);
13194 nc = NULL_TREE;
13197 else
13199 if (DECL_SIZE (ovar)
13200 && !poly_int_tree_p (DECL_SIZE (ovar)))
13202 tree ovar2 = DECL_VALUE_EXPR (ovar);
13203 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13204 ovar2 = TREE_OPERAND (ovar2, 0);
13205 gcc_assert (DECL_P (ovar2));
13206 ovar = ovar2;
13208 if (!maybe_lookup_field (ovar, ctx)
13209 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13210 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13211 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13212 continue;
13215 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13216 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13217 talign = DECL_ALIGN_UNIT (ovar);
13219 var = NULL_TREE;
13220 if (nc)
13222 if (in_reduction_clauses
13223 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13224 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13226 tree d = ovar;
13227 if (is_variable_sized (d))
13229 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13230 d = DECL_VALUE_EXPR (d);
13231 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13232 d = TREE_OPERAND (d, 0);
13233 gcc_assert (DECL_P (d));
13235 splay_tree_key key
13236 = (splay_tree_key) &DECL_CONTEXT (d);
13237 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13238 key)->value;
13239 if (d == ovar)
13240 var = nd;
13241 else
13242 var = build_fold_indirect_ref (nd);
13244 else
13245 var = lookup_decl_in_outer_ctx (ovar, ctx);
13247 if (nc
13248 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13249 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13250 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13251 && is_omp_target (stmt))
13253 x = build_sender_ref (c, ctx);
13254 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13256 else if (nc)
13258 x = build_sender_ref (ovar, ctx);
13260 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13261 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13262 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13263 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13265 gcc_assert (offloaded);
13266 tree avar
13267 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13268 mark_addressable (avar);
13269 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13270 talign = DECL_ALIGN_UNIT (avar);
13271 avar = build_fold_addr_expr (avar);
13272 gimplify_assign (x, avar, &ilist);
13274 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13276 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13277 if (!omp_privatize_by_reference (var))
13279 if (is_gimple_reg (var)
13280 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13281 suppress_warning (var);
13282 var = build_fold_addr_expr (var);
13284 else
13285 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13286 gimplify_assign (x, var, &ilist);
13288 else if (is_gimple_reg (var))
13290 gcc_assert (offloaded);
13291 tree avar = create_tmp_var (TREE_TYPE (var));
13292 mark_addressable (avar);
13293 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13294 if (GOMP_MAP_COPY_TO_P (map_kind)
13295 || map_kind == GOMP_MAP_POINTER
13296 || map_kind == GOMP_MAP_TO_PSET
13297 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13299 /* If we need to initialize a temporary
13300 with VAR because it is not addressable, and
13301 the variable hasn't been initialized yet, then
13302 we'll get a warning for the store to avar.
13303 Don't warn in that case; the mapping might
13304 be implicit. */
13305 suppress_warning (var, OPT_Wuninitialized);
13306 gimplify_assign (avar, var, &ilist);
13308 avar = build_fold_addr_expr (avar);
13309 gimplify_assign (x, avar, &ilist);
13310 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13311 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13312 && !TYPE_READONLY (TREE_TYPE (var)))
13314 x = unshare_expr (x);
13315 x = build_simple_mem_ref (x);
13316 gimplify_assign (var, x, &olist);
13319 else
13321 /* While MAP is handled explicitly by the FE,
13322 for 'target update', only the identifier is passed. */
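		    /* Illustration (an assumed Fortran example): for
			 !$omp target update from(x)
		       with X an optional or allocatable dummy argument,
		       only X itself reaches this point, and the
		       dereference vs. address-taking choice below
		       reconstructs the proper host address.  */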
13323 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13324 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13325 && (omp_is_allocatable_or_ptr (var)
13326 && omp_check_optional_argument (var, false)))
13327 var = build_fold_indirect_ref (var);
13328 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13329 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13330 || (!omp_is_allocatable_or_ptr (var)
13331 && !omp_check_optional_argument (var, false)))
13332 var = build_fold_addr_expr (var);
13333 gimplify_assign (x, var, &ilist);
13336 s = NULL_TREE;
13337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13339 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13340 s = TREE_TYPE (ovar);
13341 if (TREE_CODE (s) == REFERENCE_TYPE
13342 || omp_check_optional_argument (ovar, false))
13343 s = TREE_TYPE (s);
13344 s = TYPE_SIZE_UNIT (s);
13346 else
13347 s = OMP_CLAUSE_SIZE (c);
13348 if (s == NULL_TREE)
13349 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13350 s = fold_convert (size_type_node, s);
13351 purpose = size_int (map_idx++);
13352 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13353 if (TREE_CODE (s) != INTEGER_CST)
13354 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13356 unsigned HOST_WIDE_INT tkind, tkind_zero;
13357 switch (OMP_CLAUSE_CODE (c))
13359 case OMP_CLAUSE_MAP:
13360 tkind = OMP_CLAUSE_MAP_KIND (c);
13361 tkind_zero = tkind;
13362 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13363 switch (tkind)
13365 case GOMP_MAP_ALLOC:
13366 case GOMP_MAP_IF_PRESENT:
13367 case GOMP_MAP_TO:
13368 case GOMP_MAP_FROM:
13369 case GOMP_MAP_TOFROM:
13370 case GOMP_MAP_ALWAYS_TO:
13371 case GOMP_MAP_ALWAYS_FROM:
13372 case GOMP_MAP_ALWAYS_TOFROM:
13373 case GOMP_MAP_ALWAYS_PRESENT_TO:
13374 case GOMP_MAP_ALWAYS_PRESENT_FROM:
13375 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
13376 case GOMP_MAP_RELEASE:
13377 case GOMP_MAP_FORCE_TO:
13378 case GOMP_MAP_FORCE_FROM:
13379 case GOMP_MAP_FORCE_TOFROM:
13380 case GOMP_MAP_FORCE_PRESENT:
13381 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13382 break;
13383 case GOMP_MAP_DELETE:
13384 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13385 default:
13386 break;
13388 if (tkind_zero != tkind)
13390 if (integer_zerop (s))
13391 tkind = tkind_zero;
13392 else if (integer_nonzerop (s))
13393 tkind_zero = tkind;
13395 if (tkind_zero == tkind
13396 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13397 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13398 & ~GOMP_MAP_IMPLICIT)
13399 == 0))
13401 /* If this is an implicit map and the GOMP_MAP_IMPLICIT
13402 bits are not clobbered by other special bit encodings,
13403 turn the GOMP_MAP_IMPLICIT flag on for the runtime
13404 to see. */
13405 tkind |= GOMP_MAP_IMPLICIT;
13406 tkind_zero = tkind;
13408 break;
13409 case OMP_CLAUSE_FIRSTPRIVATE:
13410 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13411 tkind = GOMP_MAP_TO;
13412 tkind_zero = tkind;
13413 break;
13414 case OMP_CLAUSE_TO:
13415 tkind
13416 = (OMP_CLAUSE_MOTION_PRESENT (c)
13417 ? GOMP_MAP_ALWAYS_PRESENT_TO : GOMP_MAP_TO);
13418 tkind_zero = tkind;
13419 break;
13420 case OMP_CLAUSE_FROM:
13421 tkind
13422 = (OMP_CLAUSE_MOTION_PRESENT (c)
13423 ? GOMP_MAP_ALWAYS_PRESENT_FROM : GOMP_MAP_FROM);
13424 tkind_zero = tkind;
13425 break;
13426 default:
13427 gcc_unreachable ();
13429 gcc_checking_assert (tkind
13430 < (HOST_WIDE_INT_C (1U) << talign_shift));
13431 gcc_checking_assert (tkind_zero
13432 < (HOST_WIDE_INT_C (1U) << talign_shift));
13433 talign = ceil_log2 (talign);
13434 tkind |= talign << talign_shift;
13435 tkind_zero |= talign << talign_shift;
13436 gcc_checking_assert (tkind
13437 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13438 gcc_checking_assert (tkind_zero
13439 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13440 if (tkind == tkind_zero)
13441 x = build_int_cstu (tkind_type, tkind);
13442 else
13444 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13445 x = build3 (COND_EXPR, tkind_type,
13446 fold_build2 (EQ_EXPR, boolean_type_node,
13447 unshare_expr (s), size_zero_node),
13448 build_int_cstu (tkind_type, tkind_zero),
13449 build_int_cstu (tkind_type, tkind));
13451 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13452 if (nc && nc != c)
13453 c = nc;
13454 break;
13456 case OMP_CLAUSE_FIRSTPRIVATE:
13457 omp_has_device_addr_descr:
13458 if (is_gimple_omp_oacc (ctx->stmt))
13459 goto oacc_firstprivate_map;
13460 ovar = OMP_CLAUSE_DECL (c);
13461 if (omp_privatize_by_reference (ovar))
13462 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13463 else
13464 talign = DECL_ALIGN_UNIT (ovar);
13465 var = lookup_decl_in_outer_ctx (ovar, ctx);
13466 x = build_sender_ref (ovar, ctx);
13467 tkind = GOMP_MAP_FIRSTPRIVATE;
13468 type = TREE_TYPE (ovar);
13469 if (omp_privatize_by_reference (ovar))
13470 type = TREE_TYPE (type);
13471 if ((INTEGRAL_TYPE_P (type)
13472 && TYPE_PRECISION (type) <= POINTER_SIZE)
13473 || TREE_CODE (type) == POINTER_TYPE)
13475 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13476 tree t = var;
13477 if (omp_privatize_by_reference (var))
13478 t = build_simple_mem_ref (var);
13479 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13480 suppress_warning (var);
13481 if (TREE_CODE (type) != POINTER_TYPE)
13482 t = fold_convert (pointer_sized_int_node, t);
13483 t = fold_convert (TREE_TYPE (x), t);
13484 gimplify_assign (x, t, &ilist);
13486 else if (omp_privatize_by_reference (var))
13487 gimplify_assign (x, var, &ilist);
13488 else if (is_gimple_reg (var))
13490 tree avar = create_tmp_var (TREE_TYPE (var));
13491 mark_addressable (avar);
13492 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13493 suppress_warning (var);
13494 gimplify_assign (avar, var, &ilist);
13495 avar = build_fold_addr_expr (avar);
13496 gimplify_assign (x, avar, &ilist);
13498 else
13500 var = build_fold_addr_expr (var);
13501 gimplify_assign (x, var, &ilist);
13503 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13504 s = size_int (0);
13505 else if (omp_privatize_by_reference (ovar))
13506 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13507 else
13508 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13509 s = fold_convert (size_type_node, s);
13510 purpose = size_int (map_idx++);
13511 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13512 if (TREE_CODE (s) != INTEGER_CST)
13513 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13515 gcc_checking_assert (tkind
13516 < (HOST_WIDE_INT_C (1U) << talign_shift));
13517 talign = ceil_log2 (talign);
13518 tkind |= talign << talign_shift;
13519 gcc_checking_assert (tkind
13520 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13521 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13522 build_int_cstu (tkind_type, tkind));
13523 /* Fortran array descriptors: firstprivate of data + attach. */
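	  /* For an OPTIONAL dummy argument the code below emits, roughly
	     (a sketch):

	       if (present) goto not_null_lb; else goto null_lb;
	     not_null_lb:
	       var1 = <data pointer>;  var2 = <descriptor address>;
	       size1 = <array size>;   size2 = <data offset in descriptor>;
	       goto after_lb;
	     null_lb:
	       var1 = var2 = NULL;  size1 = size2 = 0;
	     after_lb:
	       <fill the two map entries from var1/var2/size1/size2>  */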
13524 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13525 && lang_hooks.decls.omp_array_data (ovar, true))
13527 tree not_null_lb, null_lb, after_lb;
13528 tree var1, var2, size1, size2;
13529 tree present = omp_check_optional_argument (ovar, true);
13530 if (present)
13532 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13533 not_null_lb = create_artificial_label (clause_loc);
13534 null_lb = create_artificial_label (clause_loc);
13535 after_lb = create_artificial_label (clause_loc);
13536 gimple_seq seq = NULL;
13537 present = force_gimple_operand (present, &seq, true,
13538 NULL_TREE);
13539 gimple_seq_add_seq (&ilist, seq);
13540 gimple_seq_add_stmt (&ilist,
13541 gimple_build_cond_from_tree (present,
13542 not_null_lb, null_lb));
13543 gimple_seq_add_stmt (&ilist,
13544 gimple_build_label (not_null_lb));
13546 var1 = lang_hooks.decls.omp_array_data (var, false);
13547 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13548 var2 = build_fold_addr_expr (x);
13549 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13550 var = build_fold_addr_expr (var);
13551 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13552 build_fold_addr_expr (var1), var);
13553 size2 = fold_convert (sizetype, size2);
13554 if (present)
13556 tree tmp = create_tmp_var (TREE_TYPE (var1));
13557 gimplify_assign (tmp, var1, &ilist);
13558 var1 = tmp;
13559 tmp = create_tmp_var (TREE_TYPE (var2));
13560 gimplify_assign (tmp, var2, &ilist);
13561 var2 = tmp;
13562 tmp = create_tmp_var (TREE_TYPE (size1));
13563 gimplify_assign (tmp, size1, &ilist);
13564 size1 = tmp;
13565 tmp = create_tmp_var (TREE_TYPE (size2));
13566 gimplify_assign (tmp, size2, &ilist);
13567 size2 = tmp;
13568 gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
13569 gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
13570 gimplify_assign (var1, null_pointer_node, &ilist);
13571 gimplify_assign (var2, null_pointer_node, &ilist);
13572 gimplify_assign (size1, size_zero_node, &ilist);
13573 gimplify_assign (size2, size_zero_node, &ilist);
13574 gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
13576 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13577 gimplify_assign (x, var1, &ilist);
13578 tkind = GOMP_MAP_FIRSTPRIVATE;
13579 talign = DECL_ALIGN_UNIT (ovar);
13580 talign = ceil_log2 (talign);
13581 tkind |= talign << talign_shift;
13582 gcc_checking_assert (tkind
13583 <= tree_to_uhwi (
13584 TYPE_MAX_VALUE (tkind_type)));
13585 purpose = size_int (map_idx++);
13586 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13587 if (TREE_CODE (size1) != INTEGER_CST)
13588 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13589 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13590 build_int_cstu (tkind_type, tkind));
13591 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13592 gimplify_assign (x, var2, &ilist);
13593 tkind = GOMP_MAP_ATTACH;
13594 purpose = size_int (map_idx++);
13595 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13596 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13597 build_int_cstu (tkind_type, tkind));
13599 break;
13601 case OMP_CLAUSE_USE_DEVICE_PTR:
13602 case OMP_CLAUSE_USE_DEVICE_ADDR:
13603 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13604 case OMP_CLAUSE_IS_DEVICE_PTR:
13605 ovar = OMP_CLAUSE_DECL (c);
13606 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13608 if (lang_hooks.decls.omp_array_data (ovar, true))
13609 goto omp_has_device_addr_descr;
13610 while (TREE_CODE (ovar) == INDIRECT_REF
13611 || TREE_CODE (ovar) == ARRAY_REF)
13612 ovar = TREE_OPERAND (ovar, 0);
13614 var = lookup_decl_in_outer_ctx (ovar, ctx);
13616 if (lang_hooks.decls.omp_array_data (ovar, true))
13618 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13619 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13620 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13621 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13623 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13624 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13626 tkind = GOMP_MAP_USE_DEVICE_PTR;
13627 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13629 else
13631 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13632 x = build_sender_ref (ovar, ctx);
13635 if (is_gimple_omp_oacc (ctx->stmt))
13637 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13639 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13640 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13643 type = TREE_TYPE (ovar);
13644 if (lang_hooks.decls.omp_array_data (ovar, true))
13645 var = lang_hooks.decls.omp_array_data (var, false);
13646 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13647 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13648 && !omp_privatize_by_reference (ovar)
13649 && !omp_is_allocatable_or_ptr (ovar))
13650 || TREE_CODE (type) == ARRAY_TYPE)
13651 var = build_fold_addr_expr (var);
13652 else
13654 if (omp_privatize_by_reference (ovar)
13655 || omp_check_optional_argument (ovar, false)
13656 || omp_is_allocatable_or_ptr (ovar))
13658 type = TREE_TYPE (type);
13659 if (POINTER_TYPE_P (type)
13660 && TREE_CODE (type) != ARRAY_TYPE
13661 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13662 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13663 && !omp_is_allocatable_or_ptr (ovar))
13664 || (omp_privatize_by_reference (ovar)
13665 && omp_is_allocatable_or_ptr (ovar))))
13666 var = build_simple_mem_ref (var);
13667 var = fold_convert (TREE_TYPE (x), var);
13670 tree present;
13671 present = omp_check_optional_argument (ovar, true);
13672 if (present)
13674 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13675 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13676 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13677 tree new_x = unshare_expr (x);
13678 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13679 fb_rvalue);
13680 gcond *cond = gimple_build_cond_from_tree (present,
13681 notnull_label,
13682 null_label);
13683 gimple_seq_add_stmt (&ilist, cond);
13684 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13685 gimplify_assign (new_x, null_pointer_node, &ilist);
13686 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13687 gimple_seq_add_stmt (&ilist,
13688 gimple_build_label (notnull_label));
13689 gimplify_assign (x, var, &ilist);
13690 gimple_seq_add_stmt (&ilist,
13691 gimple_build_label (opt_arg_label));
13693 else
13694 gimplify_assign (x, var, &ilist);
13695 s = size_int (0);
13696 purpose = size_int (map_idx++);
13697 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13698 gcc_checking_assert (tkind
13699 < (HOST_WIDE_INT_C (1U) << talign_shift));
13700 gcc_checking_assert (tkind
13701 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13702 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13703 build_int_cstu (tkind_type, tkind));
13704 break;
13707 gcc_assert (map_idx == map_cnt);
13709 if (!deep_map_cnt)
13711 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13712 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13713 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13714 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13716 for (int i = 1; i <= 2; i++)
13717 if (deep_map_cnt || !TREE_STATIC (TREE_VEC_ELT (t, i)))
13719 tree tmp = TREE_VEC_ELT (t, i);
13720 if (deep_map_cnt)
13722 const char *prefix = (i == 1 ? ".omp_data_sizes0"
13723 : ".omp_data_kinds0");
13724 tree type = (i == 1) ? size_type_node : tkind_type;
13725 type = build_array_type_nelts (type, map_cnt);
13726 tree var = create_tmp_var (type, prefix);
13727 DECL_NAMELESS (var) = 1;
13728 TREE_ADDRESSABLE (var) = 1;
13729 TREE_STATIC (var) = TREE_STATIC (tmp);
13730 DECL_INITIAL (var) = build_constructor (type, i == 1
13731 ? vsize : vkind);
13732 tmp = var;
13733 TREE_STATIC (TREE_VEC_ELT (t, i)) = 0;
13736 gimple_seq initlist = NULL;
13737 force_gimple_operand (build1 (DECL_EXPR, void_type_node, tmp),
13738 &initlist, true, NULL_TREE);
13739 gimple_seq_add_seq (&ilist, initlist);
13741 if (deep_map_cnt)
13743 tree tmp2;
13744 tree call = builtin_decl_explicit (BUILT_IN_MEMCPY);
13745 tmp2 = TYPE_SIZE_UNIT (TREE_TYPE (tmp));
13746 call = build_call_expr_loc (input_location, call, 3,
13747 TREE_VEC_ELT (t, i),
13748 build_fold_addr_expr (tmp), tmp2);
13749 gimplify_and_add (call, &ilist);
13752 if (!TREE_STATIC (tmp))
13754 tree clobber = build_clobber (TREE_TYPE (tmp));
13755 gimple_seq_add_stmt (&olist,
13756 gimple_build_assign (tmp, clobber));
13758 if (deep_map_cnt)
13760 tmp = TREE_VEC_ELT (t, i);
13761 tree call = builtin_decl_explicit (BUILT_IN_FREE);
13762 call = build_call_expr_loc (input_location, call, 1, tmp);
13763 gimplify_and_add (call, &olist);
13764 tree clobber = build_clobber (TREE_TYPE (tmp));
13765 gimple_seq_add_stmt (&olist,
13766 gimple_build_assign (tmp, clobber));
13769 else if (omp_maybe_offloaded_ctx (ctx->outer))
13771 tree id = get_identifier ("omp declare target");
13772 tree decl = TREE_VEC_ELT (t, i);
13773 DECL_ATTRIBUTES (decl)
13774 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13775 varpool_node *node = varpool_node::get (decl);
13776 if (node)
13778 node->offloadable = 1;
13779 if (ENABLE_OFFLOADING)
13781 g->have_offload = true;
13782 vec_safe_push (offload_vars, decl);
13787 if (deep_map_cnt)
13789 tree call = builtin_decl_explicit (BUILT_IN_FREE);
13790 call = build_call_expr_loc (input_location, call, 1,
13791 TREE_VEC_ELT (t, 0));
13792 gimplify_and_add (call, &olist);
13794 gimplify_expr (&TREE_VEC_ELT (t, 1), &ilist, NULL, is_gimple_val,
13795 fb_rvalue);
13798 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
13799 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13800 clobber));
13803 /* Once all the expansions are done, sequence all the different
13804 fragments inside gimple_omp_body. */
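  /* A rough sketch of the result for an offloaded region (illustrative):

       .omp_data_i = &.omp_data_arr;   receiver setup
       <fplist>                        OpenACC firstprivate init
       <per-clause receive/rebind code built below>
       <fork_seq> <tgt_body> <join_seq>
       GIMPLE_OMP_RETURN  */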
13806 new_body = NULL;
13808 if (offloaded
13809 && ctx->record_type)
13811 t = ctx->sender_decl;
13812 if (!deep_map_cnt)
13813 t = build_fold_addr_expr_loc (loc, t);
13814 /* fixup_child_record_type might have changed receiver_decl's type. */
13815 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13816 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctx->sender_decl)))
13817 gimplify_assign (ctx->receiver_decl, t, &new_body);
13818 else
13819 gimple_seq_add_stmt (&new_body,
13820 gimple_build_assign (ctx->receiver_decl, t));
13822 gimple_seq_add_seq (&new_body, fplist);
13824 if (offloaded || data_region)
13826 tree prev = NULL_TREE;
13827 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13828 switch (OMP_CLAUSE_CODE (c))
13830 tree var, x;
13831 default:
13832 break;
13833 case OMP_CLAUSE_FIRSTPRIVATE:
13834 omp_firstprivatize_data_region:
13835 if (is_gimple_omp_oacc (ctx->stmt))
13836 break;
13837 var = OMP_CLAUSE_DECL (c);
13838 if (omp_privatize_by_reference (var)
13839 || is_gimple_reg_type (TREE_TYPE (var)))
13841 tree new_var = lookup_decl (var, ctx);
13842 tree type;
13843 type = TREE_TYPE (var);
13844 if (omp_privatize_by_reference (var))
13845 type = TREE_TYPE (type);
13846 if ((INTEGRAL_TYPE_P (type)
13847 && TYPE_PRECISION (type) <= POINTER_SIZE)
13848 || TREE_CODE (type) == POINTER_TYPE)
13850 x = build_receiver_ref (var, false, ctx);
13851 if (TREE_CODE (type) != POINTER_TYPE)
13852 x = fold_convert (pointer_sized_int_node, x);
13853 x = fold_convert (type, x);
13854 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13855 fb_rvalue);
13856 if (omp_privatize_by_reference (var))
13858 tree v = create_tmp_var_raw (type, get_name (var));
13859 gimple_add_tmp_var (v);
13860 TREE_ADDRESSABLE (v) = 1;
13861 gimple_seq_add_stmt (&new_body,
13862 gimple_build_assign (v, x));
13863 x = build_fold_addr_expr (v);
13865 gimple_seq_add_stmt (&new_body,
13866 gimple_build_assign (new_var, x));
13868 else
13870 bool by_ref = !omp_privatize_by_reference (var);
13871 x = build_receiver_ref (var, by_ref, ctx);
13872 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13873 fb_rvalue);
13874 gimple_seq_add_stmt (&new_body,
13875 gimple_build_assign (new_var, x));
13878 else if (is_variable_sized (var))
13880 tree pvar = DECL_VALUE_EXPR (var);
13881 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13882 pvar = TREE_OPERAND (pvar, 0);
13883 gcc_assert (DECL_P (pvar));
13884 tree new_var = lookup_decl (pvar, ctx);
13885 x = build_receiver_ref (var, false, ctx);
13886 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13887 gimple_seq_add_stmt (&new_body,
13888 gimple_build_assign (new_var, x));
13890 break;
13891 case OMP_CLAUSE_PRIVATE:
13892 if (is_gimple_omp_oacc (ctx->stmt))
13893 break;
13894 var = OMP_CLAUSE_DECL (c);
13895 if (omp_privatize_by_reference (var))
13897 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13898 tree new_var = lookup_decl (var, ctx);
13899 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13900 if (TREE_CONSTANT (x))
13902 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13903 get_name (var));
13904 gimple_add_tmp_var (x);
13905 TREE_ADDRESSABLE (x) = 1;
13906 x = build_fold_addr_expr_loc (clause_loc, x);
13908 else
13909 break;
13911 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13912 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13913 gimple_seq_add_stmt (&new_body,
13914 gimple_build_assign (new_var, x));
13916 break;
13917 case OMP_CLAUSE_USE_DEVICE_PTR:
13918 case OMP_CLAUSE_USE_DEVICE_ADDR:
13919 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13920 case OMP_CLAUSE_IS_DEVICE_PTR:
13921 tree new_var;
13922 gimple_seq assign_body;
13923 bool is_array_data;
13924 bool do_optional_check;
13925 assign_body = NULL;
13926 do_optional_check = false;
13927 var = OMP_CLAUSE_DECL (c);
13928 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13929 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13930 goto omp_firstprivatize_data_region;
13932 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13933 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13934 x = build_sender_ref (is_array_data
13935 ? (splay_tree_key) &DECL_NAME (var)
13936 : (splay_tree_key) &DECL_UID (var), ctx);
13937 else
13939 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13941 while (TREE_CODE (var) == INDIRECT_REF
13942 || TREE_CODE (var) == ARRAY_REF)
13943 var = TREE_OPERAND (var, 0);
13945 x = build_receiver_ref (var, false, ctx);
13948 if (is_array_data)
13950 bool is_ref = omp_privatize_by_reference (var);
13951 do_optional_check = true;
13952 /* First, we copy the descriptor data from the host; then
13953 we update its data to point to the target address. */
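		  /* In effect (illustrative pseudo-code only):
		       desc = *host_desc;     copy the whole descriptor
		       desc.data = dev_ptr;   then retarget its data field  */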
13954 new_var = lookup_decl (var, ctx);
13955 new_var = DECL_VALUE_EXPR (new_var);
13956 tree v = new_var;
13957 tree v2 = var;
13958 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13959 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13960 v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);
13962 if (is_ref)
13964 v2 = build_fold_indirect_ref (v2);
13965 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13966 gimple_add_tmp_var (v);
13967 TREE_ADDRESSABLE (v) = 1;
13968 gimplify_assign (v, v2, &assign_body);
13969 tree rhs = build_fold_addr_expr (v);
13970 gimple_seq_add_stmt (&assign_body,
13971 gimple_build_assign (new_var, rhs));
13973 else
13974 gimplify_assign (new_var, v2, &assign_body);
13976 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13977 gcc_assert (v2);
13978 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13979 gimple_seq_add_stmt (&assign_body,
13980 gimple_build_assign (v2, x));
13982 else if (is_variable_sized (var))
13984 tree pvar = DECL_VALUE_EXPR (var);
13985 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13986 pvar = TREE_OPERAND (pvar, 0);
13987 gcc_assert (DECL_P (pvar));
13988 new_var = lookup_decl (pvar, ctx);
13989 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13990 gimple_seq_add_stmt (&assign_body,
13991 gimple_build_assign (new_var, x));
13993 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13994 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13995 && !omp_privatize_by_reference (var)
13996 && !omp_is_allocatable_or_ptr (var))
13997 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13999 new_var = lookup_decl (var, ctx);
14000 new_var = DECL_VALUE_EXPR (new_var);
14001 gcc_assert (TREE_CODE (new_var) == MEM_REF);
14002 new_var = TREE_OPERAND (new_var, 0);
14003 gcc_assert (DECL_P (new_var));
14004 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
14005 gimple_seq_add_stmt (&assign_body,
14006 gimple_build_assign (new_var, x));
14008 else
14010 tree type = TREE_TYPE (var);
14011 new_var = lookup_decl (var, ctx);
14012 if (omp_privatize_by_reference (var))
14014 type = TREE_TYPE (type);
14015 if (POINTER_TYPE_P (type)
14016 && TREE_CODE (type) != ARRAY_TYPE
14017 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
14018 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
14019 || (omp_privatize_by_reference (var)
14020 && omp_is_allocatable_or_ptr (var))))
14022 tree v = create_tmp_var_raw (type, get_name (var));
14023 gimple_add_tmp_var (v);
14024 TREE_ADDRESSABLE (v) = 1;
14025 x = fold_convert (type, x);
14026 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
14027 fb_rvalue);
14028 gimple_seq_add_stmt (&assign_body,
14029 gimple_build_assign (v, x));
14030 x = build_fold_addr_expr (v);
14031 do_optional_check = true;
14034 new_var = DECL_VALUE_EXPR (new_var);
14035 x = fold_convert (TREE_TYPE (new_var), x);
14036 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
14037 gimple_seq_add_stmt (&assign_body,
14038 gimple_build_assign (new_var, x));
14040 tree present;
14041 present = ((do_optional_check
14042 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
14043 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
14044 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
14045 : NULL_TREE);
14046 if (present)
14048 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
14049 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
14050 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
14051 glabel *null_glabel = gimple_build_label (null_label);
14052 glabel *notnull_glabel = gimple_build_label (notnull_label);
14053 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
14054 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
14055 fb_rvalue);
14056 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
14057 fb_rvalue);
14058 gcond *cond = gimple_build_cond_from_tree (present,
14059 notnull_label,
14060 null_label);
14061 gimple_seq_add_stmt (&new_body, cond);
14062 gimple_seq_add_stmt (&new_body, null_glabel);
14063 gimplify_assign (new_var, null_pointer_node, &new_body);
14064 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
14065 gimple_seq_add_stmt (&new_body, notnull_glabel);
14066 gimple_seq_add_seq (&new_body, assign_body);
14067 gimple_seq_add_stmt (&new_body,
14068 gimple_build_label (opt_arg_label));
14070 else
14071 gimple_seq_add_seq (&new_body, assign_body);
14072 break;
14074 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
14075 so that any firstprivate vars holding their OMP_CLAUSE_SIZE have
14076 already been handled when needed.  Likewise OMP_CLAUSE_PRIVATE
14077 for VLAs or references to VLAs. */
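	  /* Sketch: for map(tofrom: p[0:n]) followed by its
	     GOMP_MAP_FIRSTPRIVATE_POINTER entry, the second pass in effect
	     emits
	       p = (T *) (<received section base> - <bias>);
	     so the privatized pointer again addresses element 0.  */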
14078 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
14079 switch (OMP_CLAUSE_CODE (c))
14081 tree var;
14082 default:
14083 break;
14084 case OMP_CLAUSE_MAP:
14085 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
14086 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14088 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14089 poly_int64 offset = 0;
14090 gcc_assert (prev);
14091 var = OMP_CLAUSE_DECL (c);
14092 if (DECL_P (var)
14093 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
14094 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
14095 ctx))
14096 && varpool_node::get_create (var)->offloadable)
14097 break;
14098 if (TREE_CODE (var) == INDIRECT_REF
14099 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
14100 var = TREE_OPERAND (var, 0);
14101 if (TREE_CODE (var) == COMPONENT_REF)
14103 var = get_addr_base_and_unit_offset (var, &offset);
14104 gcc_assert (var != NULL_TREE && DECL_P (var));
14106 else if (DECL_SIZE (var)
14107 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
14109 tree var2 = DECL_VALUE_EXPR (var);
14110 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
14111 var2 = TREE_OPERAND (var2, 0);
14112 gcc_assert (DECL_P (var2));
14113 var = var2;
14115 tree new_var = lookup_decl (var, ctx), x;
14116 tree type = TREE_TYPE (new_var);
14117 bool is_ref;
14118 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
14119 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
14120 == COMPONENT_REF))
14122 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
14123 is_ref = true;
14124 new_var = build2 (MEM_REF, type,
14125 build_fold_addr_expr (new_var),
14126 build_int_cst (build_pointer_type (type),
14127 offset));
14129 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
14131 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
14132 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
14133 new_var = build2 (MEM_REF, type,
14134 build_fold_addr_expr (new_var),
14135 build_int_cst (build_pointer_type (type),
14136 offset));
14138 else
14139 is_ref = omp_privatize_by_reference (var);
14140 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14141 is_ref = false;
14142 bool ref_to_array = false;
14143 bool ref_to_ptr = false;
14144 if (is_ref)
14146 type = TREE_TYPE (type);
14147 if (TREE_CODE (type) == ARRAY_TYPE)
14149 type = build_pointer_type (type);
14150 ref_to_array = true;
14153 else if (TREE_CODE (type) == ARRAY_TYPE)
14155 tree decl2 = DECL_VALUE_EXPR (new_var);
14156 gcc_assert (TREE_CODE (decl2) == MEM_REF);
14157 decl2 = TREE_OPERAND (decl2, 0);
14158 gcc_assert (DECL_P (decl2));
14159 new_var = decl2;
14160 type = TREE_TYPE (new_var);
14162 else if (TREE_CODE (type) == REFERENCE_TYPE
14163 && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
14165 type = TREE_TYPE (type);
14166 ref_to_ptr = true;
14168 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
14169 x = fold_convert_loc (clause_loc, type, x);
14170 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
14172 tree bias = OMP_CLAUSE_SIZE (c);
14173 if (DECL_P (bias))
14174 bias = lookup_decl (bias, ctx);
14175 bias = fold_convert_loc (clause_loc, sizetype, bias);
14176 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
14177 bias);
14178 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
14179 TREE_TYPE (x), x, bias);
14181 if (ref_to_array)
14182 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14183 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14184 if ((is_ref && !ref_to_array)
14185 || ref_to_ptr)
14187 tree t = create_tmp_var_raw (type, get_name (var));
14188 gimple_add_tmp_var (t);
14189 TREE_ADDRESSABLE (t) = 1;
14190 gimple_seq_add_stmt (&new_body,
14191 gimple_build_assign (t, x));
14192 x = build_fold_addr_expr_loc (clause_loc, t);
14194 gimple_seq_add_stmt (&new_body,
14195 gimple_build_assign (new_var, x));
14196 prev = NULL_TREE;
14198 else if (OMP_CLAUSE_CHAIN (c)
14199 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
14200 == OMP_CLAUSE_MAP
14201 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14202 == GOMP_MAP_FIRSTPRIVATE_POINTER
14203 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14204 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
14205 prev = c;
14206 break;
14207 case OMP_CLAUSE_PRIVATE:
14208 var = OMP_CLAUSE_DECL (c);
14209 if (is_variable_sized (var))
14211 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14212 tree new_var = lookup_decl (var, ctx);
14213 tree pvar = DECL_VALUE_EXPR (var);
14214 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14215 pvar = TREE_OPERAND (pvar, 0);
14216 gcc_assert (DECL_P (pvar));
14217 tree new_pvar = lookup_decl (pvar, ctx);
14218 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14219 tree al = size_int (DECL_ALIGN (var));
14220 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14221 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14222 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14223 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14224 gimple_seq_add_stmt (&new_body,
14225 gimple_build_assign (new_pvar, x));
14227 else if (omp_privatize_by_reference (var)
14228 && !is_gimple_omp_oacc (ctx->stmt))
14230 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14231 tree new_var = lookup_decl (var, ctx);
14232 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14233 if (TREE_CONSTANT (x))
14234 break;
14235 else
14237 tree atmp
14238 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14239 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14240 tree al = size_int (TYPE_ALIGN (rtype));
14241 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14244 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14245 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14246 gimple_seq_add_stmt (&new_body,
14247 gimple_build_assign (new_var, x));
14249 break;
14252 gimple_seq fork_seq = NULL;
14253 gimple_seq join_seq = NULL;
14255 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
14257 /* If there are reductions on the offloaded region itself, treat
14258 them as a dummy GANG loop. */
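      /* A sketch of the effect: lower_oacc_reductions fills FORK_SEQ with
	 the gang-level reduction setup and JOIN_SEQ with the matching
	 teardown, and both are wrapped immediately around TGT_BODY
	 below.  */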
14259 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14261 gcall *private_marker = lower_oacc_private_marker (ctx);
14263 if (private_marker)
14264 gimple_call_set_arg (private_marker, 2, level);
14266 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
14267 false, NULL, private_marker, NULL, &fork_seq,
14268 &join_seq, ctx);
14271 gimple_seq_add_seq (&new_body, fork_seq);
14272 gimple_seq_add_seq (&new_body, tgt_body);
14273 gimple_seq_add_seq (&new_body, join_seq);
14275 if (offloaded)
14277 new_body = maybe_catch_exception (new_body);
14278 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14280 gimple_omp_set_body (stmt, new_body);
14283 bind = gimple_build_bind (NULL, NULL,
14284 tgt_bind ? gimple_bind_block (tgt_bind)
14285 : NULL_TREE);
14286 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14287 gimple_bind_add_seq (bind, ilist);
14288 gimple_bind_add_stmt (bind, stmt);
14289 gimple_bind_add_seq (bind, olist);
14291 pop_gimplify_context (NULL);
14293 if (dep_bind)
14295 gimple_bind_add_seq (dep_bind, dep_ilist);
14296 gimple_bind_add_stmt (dep_bind, bind);
14297 gimple_bind_add_seq (dep_bind, dep_olist);
14298 pop_gimplify_context (dep_bind);
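
/* Illustration (editor's note, not from the sources): DEP_BIND is only
   created when the target construct carries depend clauses, e.g.

       #pragma omp target nowait depend (out : x)
       ...

   in which case the lowered construct is wrapped in an outer bind that
   also receives the depend-clause setup (DEP_ILIST) and teardown
   (DEP_OLIST) sequences built earlier in this function.  */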
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                    OMP_CLAUSE_NUM_TEAMS);
  tree num_teams_lower = NULL_TREE;
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
      if (num_teams_lower)
        {
          num_teams_lower = fold_convert (unsigned_type_node,
                                          num_teams_lower);
          gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
                         fb_rvalue);
        }
      num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  if (num_teams_lower == NULL_TREE)
    num_teams_lower = num_teams;
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
                     fb_rvalue);
    }
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
  tree rettype = TREE_TYPE (TREE_TYPE (decl));
  tree first = create_tmp_var (rettype);
  gimple_seq_add_stmt (&bind_body,
                       gimple_build_assign (first, build_one_cst (rettype)));
  tree llabel = create_artificial_label (loc);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
  gimple *call
    = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
                         first);
  gimple_set_location (call, loc);
  tree temp = create_tmp_var (rettype);
  gimple_call_set_lhs (call, temp);
  gimple_seq_add_stmt (&bind_body, call);

  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
                                    tlabel, flabel);
  gimple_seq_add_stmt (&bind_body, cond);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
  gimple_seq_add_stmt (&bind_body,
                       gimple_build_assign (first, build_zero_cst (rettype)));

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
                           &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
                           NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
  gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
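
/* Illustration (editor's sketch, not from the sources): for a teams
   construct that is not handled as a host task region (cf. the
   gimple_omp_teams_host check in lower_omp_1 below), e.g.

       #pragma omp teams num_teams (2 : 4) thread_limit (8)
       body;

   the sequence built above is roughly

       first = 1;
     again:
       if (GOMP_teams4 (2, 4, 8, first))
         {
           first = 0;
           body;
           goto again;
         }

   i.e. the body re-executes for as long as the libgomp hook requests
   another team, with FIRST distinguishing the initial call.  */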
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with make_addressable_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
                        void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && data == NULL
      && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (make_addressable_vars
      && DECL_P (t)
      && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}
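
/* Illustration (editor's sketch, not from the sources): a typical
   trigger is a variable-sized decl whose uses go through a
   DECL_VALUE_EXPR after privatization, e.g.

       int n = ...;
       int vla[n];
       #pragma omp parallel private (vla)
       if (vla[0] > 0) ...

   Statement operands that still mention such a decl (here, the
   GIMPLE_COND) are flagged by the walk above so the statement is
   regimplified and the value expression gets expanded.  */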
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
                                 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
        = (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
        {
          ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
          ldata->decls->safe_push (*tp);
          tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
          SET_DECL_VALUE_EXPR (*tp, v);
        }
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
                               gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
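
/* Illustration (editor's note, not from the sources): for C++ code such
   as

       struct S { int x; void m (); };
       void S::m ()
       {
       #pragma omp parallel firstprivate (x)
         ...
       }

   the clause references X through an artificial dummy VAR_DECL whose
   DECL_VALUE_EXPR is built from the THIS parameter.  Inside the outlined
   region THIS itself may be remapped, so the wrapper above temporarily
   rewrites the dummy's DECL_VALUE_EXPR to use the context-local copy
   around gimple_regimplify_operands, then restores the saved expressions
   from the DECLS vector.  */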
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (make_addressable_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        if ((ctx || make_addressable_vars)
            && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
                           lower_omp_regimplify_p,
                           ctx ? NULL : &wi, NULL)
                || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
                              lower_omp_regimplify_p,
                              ctx ? NULL : &wi, NULL)))
          lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_ASSUME:
      lower_omp (gimple_assume_body_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
                 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
        {
          tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
          oacc_privatization_scan_decl_chain (ctx, vars);
        }
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_STRUCTURED_BLOCK:
      /* We have already done error checking at this point, so these nodes
         can be completely removed and replaced with their body.  */
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp (gimple_omp_body_ptr (stmt), ctx);
      gsi_replace_with_seq (gsi_p, gimple_omp_body (stmt), true);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || make_addressable_vars)
          && walk_tree (gimple_omp_atomic_load_rhs_ptr (
                          as_a <gomp_atomic_load *> (stmt)),
                        lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
        lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
        lower_omp_taskreg (gsi_p, ctx);
      else
        lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
          && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (fndecl))
          {
          case BUILT_IN_GOMP_BARRIER:
            if (ctx == NULL)
              break;
            /* FALLTHRU */
          case BUILT_IN_GOMP_CANCEL:
          case BUILT_IN_GOMP_CANCELLATION_POINT:
            omp_context *cctx;
            cctx = ctx;
            if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
              cctx = cctx->outer;
            gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
            if (!cctx->cancellable)
              {
                if (DECL_FUNCTION_CODE (fndecl)
                    == BUILT_IN_GOMP_CANCELLATION_POINT)
                  {
                    stmt = gimple_build_nop ();
                    gsi_replace (gsi_p, stmt, false);
                  }
                break;
              }
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
              {
                fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
                gimple_call_set_fndecl (call_stmt, fndecl);
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
              }
            tree lhs;
            lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
            gimple_call_set_lhs (call_stmt, lhs);
            tree fallthru_label;
            fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
            gimple *g;
            g = gimple_build_label (fallthru_label);
            gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
            g = gimple_build_cond (NE_EXPR, lhs,
                                   fold_convert (TREE_TYPE (lhs),
                                                 boolean_false_node),
                                   cctx->cancel_label, fallthru_label);
            gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
            break;
          default:
            break;
          }
      goto regimplify;

    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
        {
          if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
              || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
              || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
              || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
              || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
              || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
              || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
                  && (gimple_omp_target_kind (up->stmt)
                      == GF_OMP_TARGET_KIND_DATA)))
            continue;
          else if (!up->lastprivate_conditional_map)
            break;
          tree lhs = get_base_address (gimple_assign_lhs (stmt));
          if (TREE_CODE (lhs) == MEM_REF
              && DECL_P (TREE_OPERAND (lhs, 0))
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
                                                     0))) == REFERENCE_TYPE)
            lhs = TREE_OPERAND (lhs, 0);
          if (DECL_P (lhs))
            if (tree *v = up->lastprivate_conditional_map->get (lhs))
              {
                tree clauses;
                if (up->combined_into_simd_safelen1)
                  {
                    up = up->outer;
                    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
                      up = up->outer;
                  }
                if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
                  clauses = gimple_omp_for_clauses (up->stmt);
                else
                  clauses = gimple_omp_sections_clauses (up->stmt);
                tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
                if (!OMP_CLAUSE__CONDTEMP__ITER (c))
                  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
                                       OMP_CLAUSE__CONDTEMP_);
                gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
                gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
                gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
              }
        }
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || make_addressable_vars)
          && walk_gimple_op (stmt, lower_omp_regimplify_p,
                             ctx ? NULL : &wi))
        {
          /* Just remove clobbers; this should happen only if we have
             "privatized" local addressable variables in SIMD regions.
             The clobber isn't needed in that case, and gimplifying the
             address of the ARRAY_REF into a pointer and creating a
             MEM_REF-based clobber would create worse code than we get
             with the clobber dropped.  */
          if (gimple_clobber_p (stmt))
            {
              gsi_replace (gsi_p, gimple_build_nop (), true);
              break;
            }
          lower_omp_regimplify_operands (ctx, stmt, gsi_p);
        }
      break;
    }
}
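
/* Illustration (editor's sketch, not from the sources): inside a
   cancellable parallel region, e.g.

       #pragma omp parallel
       {
         #pragma omp cancellation point parallel
         #pragma omp barrier
       }

   the GIMPLE_CALL handling above gives GOMP_cancellation_point () an
   artificial LHS and branches on it, and rewrites GOMP_barrier () to
   GOMP_barrier_cancel (), roughly:

       tmp = GOMP_barrier_cancel ();
       if (tmp != 0) goto cancel_label; else goto fallthru_label;

   Outside a cancellable region, a cancellation point is simply replaced
   with a NOP.  */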
static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
                                 delete_omp_context);

  body = gimple_body (current_function_decl);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (make_addressable_vars)
        push_gimplify_context ();
      lower_omp (&body, NULL);
      if (make_addressable_vars)
        pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (make_addressable_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization: they
     aren't needed for debug info or anything else, have already been
     replaced everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
          == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  for (auto task_stmt : task_cpyfns)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return execute_lower_omp ();
  }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
               gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char *kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
          || (label_ctx && is_gimple_omp_oacc (label_ctx)))
        {
          gcc_checking_assert (kind == NULL);
          kind = "OpenACC";
        }
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in
     diagnose_sb_[12] so we could traverse it and issue a correct "exit" or
     "enter" error message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */
#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
        {
          if (TREE_VALUE (label_ctx) == branch_ctx)
            {
              exit_p = false;
              break;
            }
          label_ctx = TREE_CHAIN (label_ctx);
        }
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif
  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
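
/* Illustration (editor's sketch, not from the sources): the kind of code
   this diagnoses, with -fopenmp:

       void f (int x)
       {
         if (x)
           goto inside;   // error: invalid entry to OpenMP structured block
       #pragma omp parallel
         {
         inside:;
         }
       }

   Here BRANCH_CTX is NULL and LABEL_CTX is the parallel construct, so the
   specific "invalid entry" wording is used; for other mismatches the
   vaguer "invalid branch to/from" message is emitted.  */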
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
         walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
                       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
                         (splay_tree_key) gimple_label_label (
                                            as_a <glabel *> (stmt)),
                         (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
         walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
                           diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        tree lab = gimple_cond_true_label (cond_stmt);
        if (lab)
          {
            n = splay_tree_lookup (all_labels,
                                   (splay_tree_key) lab);
            diagnose_sb_0 (gsi_p, context,
                           n ? (gimple *) n->value : NULL);
          }
        lab = gimple_cond_false_label (cond_stmt);
        if (lab)
          {
            n = splay_tree_lookup (all_labels,
                                   (splay_tree_key) lab);
            diagnose_sb_0 (gsi_p, context,
                           n ? (gimple *) n->value : NULL);
          }
      }
      break;

    case GIMPLE_GOTO:
      {
        tree lab = gimple_goto_dest (stmt);
        if (TREE_CODE (lab) != LABEL_DECL)
          break;

        n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
        diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
        gswitch *switch_stmt = as_a <gswitch *> (stmt);
        unsigned int i;
        for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
          {
            tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
            n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
            if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
              break;
          }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  unsigned int execute (function *) final override
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"