/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "diagnostic.h"		/* For errorcount.  */
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "stringpool.h"
#include "omp-offload.h"
#include "tree-nested.h"
/* Identifier for a basic condition, mapping it to other basic conditions of
   its Boolean expression.  Basic conditions given the same uid (in the same
   function) are parts of the same ANDIF/ORIF expression.  Used for condition
   coverage.  */
static unsigned nextuid = 1;

/* Get a fresh identifier for a new condition expression.  This is used for
   condition coverage.  */

static unsigned
next_cond_uid ()
{
  return nextuid++;
}

/* Reset the condition uid to the value it should have when compiling a new
   function.  0 is already the default/untouched value, so start at non-zero.
   A valid and set id should always be > 0.  This is used for condition
   coverage.  */

static void
reset_cond_uid ()
{
  nextuid = 1;
}
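/* For example, in "if (a && b || c)" the three basic conditions a, b and c
   all receive the same uid, tying them to the single ANDIF/ORIF decision
   they form.  */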
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x00080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
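/* These flags are OR'ed together in the per-variable entries of the splay
   tree below; e.g. a variable named in a private clause and then referenced
   inside the region is typically recorded as
   GOVD_PRIVATE | GOVD_EXPLICIT | GOVD_SEEN.  */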
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
/* Formal (expression) temporary table handling: multiple occurrences of
   the same scalar expression are evaluated into the same temporary.  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key */
  tree temp;  /* Value */
} elt_t;

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
  unsigned in_handler_expr : 1;
};
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;

  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool in_for_exprs;
  int defaultmap[5];
};
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;
/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;
/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}
/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}
/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();

  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
/* Pop the first element off the stack of bindings.  */

void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}
/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}
/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}
/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
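/* For instance, when optimizing, gimplifying "x = a + b; y = a + b;" can
   yield roughly "t = a + b; x = t; y = t;": the second lookup returns the
   formal temporary created for the first, structurally equal value.  This
   is only safe under the constraints documented at get_formal_tmp_var
   below.  */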
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);

  /* If we failed to gimplify VAL then we can end up with the temporary
     SSA name not having a definition.  In this case return a decl.  */
  if (TREE_CODE (t) == SSA_NAME && ! SSA_NAME_DEF_STMT (t))
    return lookup_tmp_var (val, is_formal, not_gimple_reg);

  return t;
}
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
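/* As a small illustration, if the front-end reuses one PLUS_EXPR tree T for
   both operands of "T * T", the walk leaves the first reference alone, finds
   TREE_VISITED already set on the second reference, and replaces it with a
   fresh copy, so each occurrence can later be gimplified in place without
   clobbering the other.  */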
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
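/* For example, a statement expression used as an initializer,
   "int x = ({ f (); 42; });", reaches the gimplifier as a value-producing
   wrapper; voidify_wrapper_expr gives the wrapper void type and pushes the
   assignment down onto the last value, roughly "({ f (); retval = 42; })",
   returning the "retval" temporary it created.  */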
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
/* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}
/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add use_after_scope_memory attribute for the variable in order
	 to prevent re-written into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
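/* In GIMPLE dumps the generated internal call appears as, e.g.,
     .ASAN_MARK (POISON, &x, 4);
   marking the shadow memory of x as inaccessible once it goes out of
   scope.  */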
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);
  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must
		 specify an allocator clause unless a requires directive with
		 the dynamic_allocators clause is present in the same
		 compilation unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  For C/C++, there should be always a
		 statement list following if TREE_USED, except for, e.g., using
		 this decl in a static_assert; in that case, only a single
		 DECL_EXPR remains, which can be skipped here.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc))
		       && (lang_GNU_Fortran ()
			   || (TREE_CODE (BIND_EXPR_BODY (bind_expr))
			       != DECL_EXPR)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and type(C_funptr);
		     note that normal proc pointers are rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
		     hence, for some decls, a size variable is saved in the
		     attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr)) is set
		     and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
		     is set, used in a condition much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not have properly
			 nested declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the GOMP_alloc
			 at the top, unless an allocator or size expression
			 requires to put it afterward; note that the size is
			 always later in generated code; for strings, no
			 size expr but still an expr might be available.
			 As LTO does not handle a statement list, 'sl' has
			 to be removed; done so by removing the attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
			 here; for C/C++ it will be added in the 'cleanup'
			 section after gimplification.  But Fortran already has
			 a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }
	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }
  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;
  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 causes that the GOMP_free call is already added above;
		 and "omp allocate" is removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t),
					    CLOBBER_STORAGE_END);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }
  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }
  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
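/* For a block that needs stack save/restore, the result is roughly

     gimple_bind
       saved_stack.1 = __builtin_stack_save ();
       try
	 { ... body ... }
       finally
	 { __builtin_stack_restore (saved_stack.1); }

   with the out-of-scope clobbers appended to the same cleanup sequence.  */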
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are not in a conditional context, add PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
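/* E.g. "return a + b;" arrives here as a RETURN_EXPR wrapping a MODIFY_EXPR
   of the RESULT_DECL and leaves as roughly

     retval.0 = a + b;
     return retval.0;

   with the same temporary reused by every return in the function whenever
   the value is not returned in memory.  */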
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
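/* For "char a[n];" this emits, roughly,

     a.0 = __builtin_alloca_with_align (SIZE, ALIGN);

   and every later use of "a" is rewritten as "*a.0" through the recorded
   DECL_VALUE_EXPR (variable names here are illustrative).  */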
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}
/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  const location_t loc = DECL_SOURCE_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
  tree init_type_node = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
      sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
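/* E.g. with -ftrivial-auto-var-init=zero, a local "int i;" shows up in
   dumps as roughly

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   where 4 is the size, 2 the auto_init_type, and "i" the name; expansion
   into the real zero (or pattern) store happens only late, so that
   uninitialized-use warnings keep working in the meantime.  */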
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init with fewer initializers than members
   aggregate will initialize the rest of the aggregate as-if it were
   static initialization.  In turn static initialization guarantees
   that padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init' to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}
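/* The emitted call appears in dumps as roughly
     __builtin_clear_padding (&x, 1);
   where the second, dummy argument flags the call as generated for
   automatic-variable initialization rather than an explicit user call.  */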
/* Return true if the DECL needs to be automatically initialized by the
   compiler.  */

static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
        gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
        gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;
      /* Check whether a decl has FE created VALUE_EXPR here BEFORE
         gimplify_vla_decl creates VALUE_EXPR for a vla decl.
         If the decl has a VALUE_EXPR that was created by the FE (usually
         the C++ FE), it's a proxy variable, and the FE already initialized
         its VALUE_EXPR, so we should not initialize it again.  */
      bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
          || (!TREE_STATIC (decl)
              && flag_stack_check == GENERIC_STACK_CHECK
              && maybe_gt (size,
                           (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
        {
          gimplify_vla_decl (decl, seq_p);
          is_vla = true;
        }

      if (asan_poisoned_variables
          && TREE_ADDRESSABLE (decl)
          && !TREE_STATIC (decl)
          && !DECL_HAS_VALUE_EXPR_P (decl)
          && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
          && dbg_cnt (asan_use_after_scope)
          && !gimplify_omp_ctxp
          /* GNAT introduces temporaries to hold return values of calls in
             initializers of variables defined in other units, so the
             declaration of the variable is discarded completely.  We do not
             want to issue poison calls for such dropped variables.  */
          && (DECL_SEEN_IN_BIND_EXPR_P (decl)
              || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
        {
          asan_poisoned_variables->add (decl);
          asan_poison_variable (decl, false, seq_p);
          if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
            gimplify_ctxp->live_switch_vars->add (decl);
        }

      /* Some front ends do not explicitly declare all anonymous
         artificial variables.  We compensate here by declaring the
         variables, though it would be better if the front ends would
         explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
          && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
        gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
        {
          if (!TREE_STATIC (decl))
            {
              DECL_INITIAL (decl) = NULL_TREE;
              init = build2 (INIT_EXPR, void_type_node, decl, init);
              gimplify_and_add (init, seq_p);
              /* Clear TREE_READONLY if we really have an initialization.  */
              if (!DECL_INITIAL (decl)
                  && !omp_privatize_by_reference (decl))
                TREE_READONLY (decl) = 0;
            }
          else
            /* We must still examine initializers for static variables
               as they may contain a label address.  */
            walk_tree (&init, force_labels_r, NULL, NULL);
        }
      /* When there is no explicit initializer, if the user requested,
         we should insert an artificial initializer for this automatic
         variable.  */
      else if (is_var_need_auto_init (decl)
               && !decl_had_value_expr_p)
        {
          gimple_add_init_for_auto_var (decl,
                                        flag_auto_var_init,
                                        seq_p);
          /* The expanding of a call to the above .DEFERRED_INIT will apply
             block initialization to the whole space covered by this variable.
             As a result, all the paddings will be initialized to zeroes
             for zero initialization and 0xFE byte-repeatable patterns for
             pattern initialization.
             In order to make the paddings zeroes for pattern init, we
             should add a call to __builtin_clear_padding to clear the
             paddings to zero, to be compatible with Clang.
             We cannot insert this call if the variable is a gimple register
             since __builtin_clear_padding will take the address of the
             variable.  As a result, if a long double/_Complex long double
             variable is spilled to the stack later, its padding is 0xFE.  */
          if (flag_auto_var_init == AUTO_INIT_PATTERN
              && !is_gimple_reg (decl)
              && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
            gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
        }
    }

  return GS_ALL_DONE;
}
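
/* For example (an illustrative sketch, not taken from the testsuite):
   gimplifying the DECL_EXPR for a VLA 'int a[n];' goes through
   gimplify_vla_decl above and produces something like

     D.1 = (sizetype) n;
     D.2 = D.1 * 4;
     a.3 = __builtin_alloca_with_align (D.2, 32);

   with DECL_VALUE_EXPR (a) set to '*a.3', so later uses of 'a' are
   rewritten when gimplify_var_or_parm_decl expands the value-expr.  */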
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
                           gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
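
/* Sketch of the lowering performed here: a LOOP_EXPR with body BODY
   becomes, roughly,

     start_label:
       BODY
       goto start_label;
     exit_label:           <- only if an EXIT_EXPR materialized one

   matching the label/goto statements appended above.  */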
/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Emit warning for the unreachable statement STMT if needed.
   Return the gimple itself when the warning is emitted, otherwise
   return NULL.  */
static gimple *
emit_warn_switch_unreachable (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_GOTO
      && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
      && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
    /* Don't warn for compiler-generated gotos.  These occur
       in Duff's devices, for example.  */
    return NULL;
  else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
           && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
               || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
                   && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
               || (is_gimple_assign (stmt)
                   && gimple_assign_single_p (stmt)
                   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
                   && gimple_call_internal_p (
                        SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
                        IFN_DEFERRED_INIT))))
    /* Don't warn for compiler-generated initializations for
       -ftrivial-auto-var-init, i.e.:
       case 1: a call to .DEFERRED_INIT;
       case 2: a call to __builtin_clear_padding whose 2nd argument is
               present and non-zero;
       case 3: a gimple assign store right after the call to .DEFERRED_INIT
               that has the LHS of .DEFERRED_INIT as the RHS as follows:
                 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
                 i1 = _1.  */
    return NULL;
  else
    warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
                "statement will never be executed");
  return stmt;
}
/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
                                         bool *handled_ops_p,
                                         struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool unreachable_issued = wi->info != NULL;

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
         If it's empty, don't dive into it--that would result in
         worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
        {
          if (warn_switch_unreachable && !unreachable_issued)
            wi->info = emit_warn_switch_unreachable (stmt);

          /* Stop when auto var init warning is not on.  */
          if (!warn_trivial_auto_var_init)
            return integer_zero_node;
        }

      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
         are never executed.  If there's something to warn about,
         there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_LABEL:
      /* Stop till the first Label.  */
      return integer_zero_node;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
        {
          *handled_ops_p = false;
          break;
        }
      if (warn_trivial_auto_var_init
          && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
          && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
        {
          /* Get the variable name from the 3rd argument of call.  */
          tree var_name = gimple_call_arg (stmt, 2);
          var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
          const char *var_name_str = TREE_STRING_POINTER (var_name);

          warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
                      "%qs cannot be initialized with "
                      "%<-ftrivial-auto-var-init%>",
                      var_name_str);
          break;
        }

      /* Fall through.  */
    default:
      /* Check the first "real" statement (not a decl/lexical scope/...), issue
         warning if needed.  */
      if (warn_switch_unreachable && !unreachable_issued)
        wi->info = emit_warn_switch_unreachable (stmt);
      /* Stop when auto var init warning is not on.  */
      if (!warn_trivial_auto_var_init)
        return integer_zero_node;
      break;
    }
  return NULL_TREE;
}
/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  Also warn when -ftrivial-auto-var-init
   cannot initialize an auto variable in such a situation.
   SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
{
  if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
      /* This warning doesn't play well with Fortran when optimizations
         are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
}
/* A label entry that pairs label and a location.  */
struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;

  return NULL;
}
/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;

  return false;
}
/* Find the last nondebug statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        gbind *bind = as_a <gbind *> (stmt);
        stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
        return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
        gtry *try_stmt = as_a <gtry *> (stmt);
        stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
        gimple *last_eval = last_stmt_in_scope (stmt);
        if (gimple_stmt_may_fallthru (last_eval)
            && (last_eval == NULL
                || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
            && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
          {
            stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
            return last_stmt_in_scope (stmt);
          }
        else
          return last_eval;
      }

    case GIMPLE_DEBUG:
      gcc_unreachable ();

    default:
      return stmt;
    }
}
/* Collect labels that may fall through into LABELS and return the statement
   preceding another case label, or a user-defined label.  Store a location
   useful to give warnings at *PREVLOC (usually the location of the returned
   statement or of its surrounding scope).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
                            auto_vec <struct label_entry> *labels,
                            location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
        {
          /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
             which starts on a GIMPLE_SWITCH and ends with a break label.
             Handle that as a single statement that can fall through.  */
          gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
          gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
          gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
          if (last
              && gimple_code (first) == GIMPLE_SWITCH
              && gimple_code (last) == GIMPLE_LABEL)
            {
              tree label = gimple_label_label (as_a <glabel *> (last));
              if (SWITCH_BREAK_LABEL_P (label))
                {
                  prev = bind;
                  gsi_next (gsi_p);
                  continue;
                }
            }
        }
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
          || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
        {
          /* Nested scope.  Only look at the last statement of
             the innermost scope.  */
          location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
          gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
          if (last)
            {
              prev = last;
              /* It might be a label without a location.  Use the
                 location of the scope then.  */
              if (!gimple_has_location (prev))
                *prevloc = bind_loc;
            }
          gsi_next (gsi_p);
          continue;
        }

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
        {
          gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
          tree false_lab = gimple_cond_false_label (cond_stmt);
          location_t if_loc = gimple_location (cond_stmt);

          /* If we have e.g.
               if (i > 1) goto <D.2259>; else goto D;
             we can't do much with the else-branch.  */
          if (!DECL_ARTIFICIAL (false_lab))
            break;

          /* Go on until the false label, then one step back.  */
          for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
            {
              gimple *stmt = gsi_stmt (*gsi_p);
              if (gimple_code (stmt) == GIMPLE_LABEL
                  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
                break;
            }

          /* Not found?  Oops.  */
          if (gsi_end_p (*gsi_p))
            break;

          /* A dead label can't fall through.  */
          if (!UNUSED_LABEL_P (false_lab))
            {
              struct label_entry l = { false_lab, if_loc };
              labels->safe_push (l);
            }

          /* Go to the last statement of the then branch.  */
          gsi_prev (gsi_p);

          /* if (i != 0) goto <D.1759>; else goto <D.1760>;
             <D.1759>:
             <stmt>;
             <D.1760>:;
             goto <D.1761>;  <-- here
             <D.1761>:;  */
          if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
              && !gimple_has_location (gsi_stmt (*gsi_p)))
            {
              /* Look at the statement before, it might be
                 attribute fallthrough, in which case don't warn.  */
              gsi_prev (gsi_p);
              bool fallthru_before_dest
                = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
              gsi_next (gsi_p);
              tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
              if (!fallthru_before_dest)
                {
                  struct label_entry l = { goto_dest, if_loc };
                  labels->safe_push (l);
                }
            }
          /* This case is about
               if (1 != 0) goto <D.2022>; else goto <D.2023>;
               <D.2022>:;
               n = n + 1;  // #1
               <D.2023>:;  // #2
               <D.1988>:;  // #3
             where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
             through to #3.  So set PREV to #1.  */
          else if (UNUSED_LABEL_P (false_lab))
            prev = gsi_stmt (*gsi_p);

          /* And move back.  */
          gsi_next (gsi_p);
        }

      /* Remember the last statement.  Skip labels that are of no interest
         to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
          if (find_label_entry (labels, label))
            prev = gsi_stmt (*gsi_p);
        }
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
        ;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
        ;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
        prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
         /* Stop if we find a case or a user-defined label.  */
         && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
             || !gimple_has_location (gsi_stmt (*gsi_p))));

  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}
/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
         foo ();
       label:
         bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      while (!gsi_end_p (gsi)
             && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
             && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
             && !case_label_p (&gimplify_ctxp->case_labels, l))
        gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
        return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case label
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
         && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
             || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
         { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
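
/* Illustrative examples (hypothetical code).  A warning is wanted for

     case 0:
       foo ();
     case 1:    <- may fall through

   but is suppressed when the following case terminates immediately, e.g.

     case 0:
       foo ();
     default:
       break;

   per the GIMPLE_GOTO/GIMPLE_RETURN checks above.  */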
/* Callback for walk_gimple_seq.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                             struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
        /* Found a label.  Skip all immediately following labels.  */
        while (!gsi_end_p (*gsi_p)
               && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
          gsi_next_nondebug (gsi_p);

        /* There might be no more statements.  */
        if (gsi_end_p (*gsi_p))
          return integer_zero_node;

        /* Vector of labels that fall through.  */
        auto_vec <struct label_entry> labels;
        location_t prevloc;
        gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

        /* There might be no more statements.  */
        if (gsi_end_p (*gsi_p))
          return integer_zero_node;

        gimple *next = gsi_stmt (*gsi_p);
        tree label;
        /* If what follows is a label, then we may have a fallthrough.  */
        if (gimple_code (next) == GIMPLE_LABEL
            && gimple_has_location (next)
            && (label = gimple_label_label (as_a <glabel *> (next)))
            && prev != NULL)
          {
            struct label_entry *l;
            bool warned_p = false;
            auto_diagnostic_group d;
            if (!should_warn_for_implicit_fallthrough (gsi_p, label))
              /* Quiet.  */;
            else if (gimple_code (prev) == GIMPLE_LABEL
                     && (label = gimple_label_label (as_a <glabel *> (prev)))
                     && (l = find_label_entry (&labels, label)))
              warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
                                     "this statement may fall through");
            else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
                     /* Try to be clever and don't warn when the statement
                        can't actually fall through.  */
                     && gimple_stmt_may_fallthru (prev)
                     && prevloc != UNKNOWN_LOCATION)
              warned_p = warning_at (prevloc,
                                     OPT_Wimplicit_fallthrough_,
                                     "this statement may fall through");
            if (warned_p)
              inform (gimple_location (next), "here");

            /* Mark this label as processed so as to prevent multiple
               warnings in nested switches.  */
            FALLTHROUGH_LABEL_P (label) = true;

            /* So that next warn_implicit_fallthrough_r will start looking for
               a new sequence starting with this label.  */
            gsi_prev (gsi_p);
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Warn when a switch case falls through.  */

static void
maybe_warn_implicit_fallthrough (gimple_seq seq)
{
  if (!warn_implicit_fallthrough)
    return;

  /* This warning is meant for C/C++/ObjC/ObjC++ only.  */
  if (!(lang_GNU_C ()
        || lang_GNU_CXX ()
        || lang_GNU_OBJC ()))
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
}
/* Callback for walk_gimple_seq.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
        {
          location_t loc = gimple_location (stmt);
          gsi_remove (gsi_p, true);
          wi->removed_stmt = true;

          /* nothrow flag is added by genericize_c_loop to mark fallthrough
             statement at the end of some loop's body.  Those should be
             always diagnosed, either because they indeed don't precede
             a case label or default label, or because the next statement
             is not within the same iteration statement.  */
          if ((stmt->subcode & GF_CALL_NOTHROW) != 0)
            {
              pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
                       "a case label or default label");
              break;
            }

          if (gsi_end_p (*gsi_p))
            {
              static_cast<location_t *>(wi->info)[0] = BUILTINS_LOCATION;
              static_cast<location_t *>(wi->info)[1] = loc;
              break;
            }

          bool found = false;

          gimple_stmt_iterator gsi2 = *gsi_p;
          stmt = gsi_stmt (gsi2);
          if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
            {
              /* Go on until the artificial label.  */
              tree goto_dest = gimple_goto_dest (stmt);
              for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
                {
                  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
                      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
                           == goto_dest)
                    break;
                }

              /* Not found?  Stop.  */
              if (gsi_end_p (gsi2))
                break;

              /* Look one past it.  */
              gsi_next (&gsi2);
            }

          /* We're looking for a case label or default label here.  */
          while (!gsi_end_p (gsi2))
            {
              stmt = gsi_stmt (gsi2);
              if (gimple_code (stmt) == GIMPLE_LABEL)
                {
                  tree label = gimple_label_label (as_a <glabel *> (stmt));
                  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
                    {
                      found = true;
                      break;
                    }
                }
              else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
                ;
              else if (!is_gimple_debug (stmt))
                /* Anything else is not expected.  */
                break;
              gsi_next (&gsi2);
            }
          if (!found)
            pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
                     "a case label or default label");
        }
      break;
    default:
      static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
      break;
    }
  return NULL_TREE;
}
/* Expand all FALLTHROUGH () calls in SEQ.  */

static void
expand_FALLTHROUGH (gimple_seq *seq_p)
{
  struct walk_stmt_info wi;
  location_t loc[2];
  memset (&wi, 0, sizeof (wi));
  loc[0] = UNKNOWN_LOCATION;
  loc[1] = UNKNOWN_LOCATION;
  wi.info = (void *) &loc[0];
  walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
  if (loc[0] != UNKNOWN_LOCATION)
    /* We've found [[fallthrough]]; at the end of a switch, which the C++
       standard says is ill-formed; see [dcl.attr.fallthrough].  */
    pedwarn (loc[1], 0, "attribute %<fallthrough%> not preceding "
             "a case label or default label");
}
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
        gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
        gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
        expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
        {
          gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
          delete gimplify_ctxp->live_switch_vars;
        }
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
                                            &default_case);

      bool add_bind = false;
      if (!default_case)
        {
          glabel *new_default;

          default_case
            = build_case_label (NULL_TREE, NULL_TREE,
                                create_artificial_label (UNKNOWN_LOCATION));
          if (old_in_switch_expr)
            {
              SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
              add_bind = true;
            }
          new_default = gimple_build_label (CASE_LABEL (default_case));
          gimplify_seq_add_stmt (&switch_body_seq, new_default);
        }
      else if (old_in_switch_expr)
        {
          gimple *last = gimple_seq_last_stmt (switch_body_seq);
          if (last && gimple_code (last) == GIMPLE_LABEL)
            {
              tree label = gimple_label_label (as_a <glabel *> (last));
              if (SWITCH_BREAK_LABEL_P (label))
                add_bind = true;
            }
        }

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
                                         default_case, labels);
      gimple_set_location (switch_stmt, EXPR_LOCATION (switch_expr));
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
         ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
         wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
         so that we can easily find the start and end of the switch
         statement.  */
      if (add_bind)
        {
          gimple_seq bind_body = NULL;
          gimplify_seq_add_stmt (&bind_body, switch_stmt);
          gimple_seq_add_seq (&bind_body, switch_body_seq);
          gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
          gimple_set_location (bind, EXPR_LOCATION (switch_expr));
          gimplify_seq_add_stmt (pre_p, bind);
        }
      else
        {
          gimplify_seq_add_stmt (pre_p, switch_stmt);
          gimplify_seq_add_seq (pre_p, switch_body_seq);
        }
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
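
/* A rough sketch of the result (the <D.n> labels are illustrative):

     switch (x) { case 0: a = 1; break; default: a = 2; }

   gimplifies to approximately

     switch (x) <default: <D.3>, case 0: <D.1>>
     <D.1>:
     a = 1;
     goto <D.2>;     <- the break
     <D.3>:
     a = 2;
     <D.2>:

   with the case-label vector collected by gimplify_case_label_expr while
   gimplifying the body.  */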
/* Gimplify the LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
              == current_function_decl);

  tree label = LABEL_EXPR_LABEL (*expr_p);
  glabel *label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
                                                      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
                                                      TAKEN));

  return GS_ALL_DONE;
}
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  tree label = CASE_LABEL (*expr_p);
  label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
                                                      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
                                                      TAKEN));

  return GS_ALL_DONE;
}
/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}
/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}
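
/* Sketch: an EXIT_EXPR with condition COND is rewritten here as, roughly,

     if (COND) goto exit_label;

   and gimplify_loop_expr later emits exit_label just after the loop's
   back-edge goto.  */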
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
         operand 0.  */
      type_quals = TYPE_QUALS (type)
        | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
        type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
         type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
                                  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
                    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
                    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
         expression, then canonicalize its type now in order to expose more
         redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
        canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
         to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
        canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
                               TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.
     Another possible culprit are size expressions for variably modified
     types which are lost in the FE or not gimplified correctly.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          return;

        default:
          break;
        }
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
        {
          tree op = TREE_OPERAND (t, i);
          if (op && TREE_SIDE_EFFECTS (op))
            TREE_SIDE_EFFECTS (t) = 1;
        }
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      if (code == SSA_NAME)
        /* No side-effects.  */
        return;
      gcc_unreachable ();
    }
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
              : min_lval '[' val ']'
              | min_lval '.' ID
              | compound_lval '[' val ']'
              | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
        *p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
        ;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
         additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
               && gimplify_var_or_parm_decl (p) == GS_OK)
        goto restart;
      else
        break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborate nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     The base expression may contain a statement expression that
     has declarations used in size expressions, so has to be
     gimplified before gimplifying the size expressions.

     So we do this in three steps.  First we deal with variable
     bounds, sizes, and positions, then we gimplify the base and
     ensure it is memory if needed, then we deal with the annotations
     for any variables in the components and any indices, from left
     to right.  */

  bool need_non_reg = false;
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (error_operand_p (TREE_OPERAND (t, 0)))
        return GS_ERROR;

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Deal with the low bound and element type size and put them into
             the ARRAY_REF.  If these values are set, they have already been
             gimplified.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree low = unshare_expr (array_ref_low_bound (t));
              if (!is_gimple_min_invariant (low))
                TREE_OPERAND (t, 2) = low;
            }

          if (TREE_OPERAND (t, 3) == NULL_TREE)
            {
              tree elmt_size = array_ref_element_size (t);
              if (!is_gimple_min_invariant (elmt_size))
                {
                  elmt_size = unshare_expr (elmt_size);
                  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
                  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

                  /* Divide the element size by the alignment of the element
                     type (above).  */
                  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
                                              elmt_size, factor);

                  TREE_OPERAND (t, 3) = elmt_size;
                }
            }
          need_non_reg = true;
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          /* Set the field offset into T and gimplify it.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree offset = component_ref_field_offset (t);
              if (!is_gimple_min_invariant (offset))
                {
                  offset = unshare_expr (offset);
                  tree field = TREE_OPERAND (t, 1);
                  tree factor
                    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

                  /* Divide the offset by its alignment.  */
                  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
                                           offset, factor);

                  TREE_OPERAND (t, 2) = offset;
                }
            }
          need_non_reg = true;
        }
      else if (!is_gimple_reg_type (TREE_TYPE (t)))
        /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
           is a non-register type then require the base object to be a
           non-register as well.  */
        need_non_reg = true;
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
                        fallback | fb_lvalue);
  ret = MIN (ret, tret);
  if (ret == GS_ERROR)
    return GS_ERROR;

  /* Step 2a: if we have component references we do not support on
     registers then make sure the base isn't a register.  Of course
     we can only do so if an rvalue is OK.  */
  if (need_non_reg && (fallback & fb_rvalue))
    prepare_gimple_addressable (p, pre_p);

  /* Step 3: gimplify size expressions and the indices and operands of
     ARRAY_REF.  During this loop we also remove any useless conversions.
     If we operate on a register also make sure to properly gimplify
     to individual operations.  */

  bool reg_operations = is_gimple_reg (*p);
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          gcc_assert (!reg_operations);

          /* Gimplify the low bound and element type size.  */
          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);

          tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);

          /* Gimplify the dimension.  */
          tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
                                is_gimple_val, fb_rvalue);
          ret = MIN (ret, tret);
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          gcc_assert (!reg_operations);

          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);
        }
      else if (reg_operations)
        {
          tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
                                is_gimple_val, fb_rvalue);
          ret = MIN (ret, tret);
        }

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
         TREE_SIDE_EFFECTS set which would have caused all the outer
         expressions in *EXPR_P leading to P to also have had
         TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
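
/* An illustrative walk-through (hypothetical source): for '*EXPR_P ==
   s.a[i].f' the subexpression stack holds the '.f', '[i]' and '.a'
   references; step 2 gimplifies the base 's' as an lvalue, and step 3
   then gimplifies the index 'i' to a gimple value, e.g.

     D.1 = i;
     ... s.a[D.1].f ...

   leaving one compound lvalue whose indices are all GIMPLE values.  */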
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
        return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
                       fold_build2 (arith_code, arith_type,
                                    fold_convert (arith_type, lhs),
                                    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
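
/* Sketch of the postfix case when the value is used: 'b = a++' becomes
   roughly

     a.1 = a;
     a = a.1 + 1;
     b = a.1;

   i.e. the saved rvalue (LHS above) is both the result and the operand
   of the queued increment, while the prefix case instead leaves the
   plain MODIFY_EXPR in *EXPR_P.  */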
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || poly_int_tree_p (size))
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
              bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
        {
          tree init = TARGET_EXPR_INITIAL (*arg_p);
          if (init
              && !VOID_TYPE_P (TREE_TYPE (init))
              /* Currently, due to c++/116015, it is not desirable to
                 strip a TARGET_EXPR whose initializer is a {}.  The
                 problem is that if we do elide it, we also have to
                 replace all the occurrences of the slot temporary in the
                 initializer with the temporary created for the argument.
                 But we do not have that temporary yet so the replacement
                 would be quite awkward and it might be needed to resort
                 back to a PLACEHOLDER_EXPR.  Note that stripping the
                 TARGET_EXPR wouldn't help anyway, as gimplify_expr would
                 just allocate a temporary to store the CONSTRUCTOR into.

                 See convert_for_arg_passing for the C++ code that marks
                 the TARGET_EXPR as eliding or not.  */
              && TREE_CODE (init) != CONSTRUCTOR)
            *arg_p = init;
        }
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
/* Don't fold inside offloading or taskreg regions: it can break code by
   adding decl references that weren't in the source.  We'll do it during
   omplower pass instead.  */

static bool
maybe_fold_stmt (gimple_stmt_iterator *gsi)
{
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
      return false;
    else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
      return false;
  /* Delay folding of builtins until the IL is in consistent state
     so the diagnostic machinery can do a better job.  */
  if (gimple_call_builtin_p (gsi_stmt (*gsi)))
    return false;
  return fold_stmt (gsi);
}
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      if (ifn == IFN_ASSUME)
        {
          if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
            {
              /* If the [[assume (cond)]]; condition is simple
                 enough and can be evaluated unconditionally
                 without side-effects, expand it as
                 if (!cond) __builtin_unreachable ();  */
              tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
              *expr_p = build3 (COND_EXPR, void_type_node,
                                CALL_EXPR_ARG (*expr_p, 0), void_node,
                                build_call_expr_loc (EXPR_LOCATION (*expr_p),
                                                     fndecl, 0));
              return GS_OK;
            }
          /* If not optimizing, ignore the assumptions.  */
          if (!optimize || seen_error ())
            {
              *expr_p = NULL_TREE;
              return GS_ALL_DONE;
            }
          /* Temporarily, until gimple lowering, transform
             .ASSUME (cond);
             into:
             [[assume (guard)]]
             {
               guard = cond;
             }
             such that gimple lowering can outline the condition into
             a separate function easily.  */
          tree guard = create_tmp_var (boolean_type_node);
          *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
                            gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
          *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
          push_gimplify_context ();
          gimple_seq body = NULL;
          gimple *g = gimplify_and_return_first (*expr_p, &body);
          pop_gimplify_context (g);
          g = gimple_build_assume (guard, body);
          gimple_set_location (g, loc);
          gimplify_seq_add_stmt (pre_p, g);
          *expr_p = NULL_TREE;
          return GS_ALL_DONE;
        }

      for (i = 0; i < nargs; i++)
        {
          gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                        EXPR_LOCATION (*expr_p));
          vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
        }

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        /* If the call has been built for a variable-sized object, then we
           want to restore the stack level when the enclosing BIND_EXPR is
           exited to reclaim the allocated space; otherwise, we precisely
           need to do the opposite and preserve the latest stack level.  */
        if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
          gimplify_ctxp->save_stack = true;
        else
          gimplify_ctxp->keep_stack = true;
        break;

      case BUILT_IN_VA_START:
        {
          builtin_va_start_p = true;
          if (call_expr_nargs (*expr_p) < 2)
            {
              error ("too few arguments to function %<va_start%>");
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }

          if (fold_builtin_next_arg (*expr_p, true))
            {
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }
          break;
        }

      case BUILT_IN_EH_RETURN:
        cfun->calls_eh_return = true;
        break;

      case BUILT_IN_CLEAR_PADDING:
        if (call_expr_nargs (*expr_p) == 1)
          {
            /* Remember the original type of the argument in an internal
               dummy second argument, as in GIMPLE pointer conversions are
               useless.  Also mark this call as not for automatic
               initialization in the internal dummy third argument.  */
            p = CALL_EXPR_ARG (*expr_p, 0);
            *expr_p
              = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
                                     build_zero_cst (TREE_TYPE (p)));
            return GS_OK;
          }
        break;

      default:
        ;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0)
    {
      tree variant = omp_resolve_declare_variant (fndecl);
      if (variant != fndecl)
        CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                       is_gimple_call_addr, fb_rvalue);

  if (ret == GS_ERROR)
    return GS_ERROR;

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
          && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
        {
          tree call = *expr_p;

          --nargs;
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
                                          CALL_EXPR_FN (call),
                                          nargs, CALL_EXPR_ARGP (call));

          /* Copy all CALL_EXPR flags, location and block, except
             CALL_EXPR_VA_ARG_PACK flag.  */
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
            = CALL_EXPR_RETURN_SLOT_OPT (call);
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

          /* Set CALL_EXPR_VA_ARG_PACK.  */
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
        }
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     names there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                                EXPR_LOCATION (*expr_p), ! returns_twice);

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
        CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
        {
          enum gimplify_status t;
          t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
                            EXPR_LOCATION (*expr_p), ! returns_twice);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
          /* An infinite loop is considered a side effect.  */
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
        TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
4155 gimplify_modify_expr. */
4158 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
4159 have to do is replicate it as a GIMPLE_CALL tuple. */
4160 gimple_stmt_iterator gsi
;
4161 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
4162 notice_special_calls (call
);
4163 gimplify_seq_add_stmt (pre_p
, call
);
4164 gsi
= gsi_last (*pre_p
);
4165 maybe_fold_stmt (&gsi
);
4166 *expr_p
= NULL_TREE
;
4169 /* Remember the original function type. */
4170 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
4171 CALL_EXPR_FN (*expr_p
));
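
/* Illustrative sketch (not taken from the sources above; exact temporaries,
   names and ordering depend on the front end, PUSH_ARGS_REVERSED and
   optimization level): for a call whose operands have side effects, such as

     foo (i++, bar ());

   the callee address and each argument are forced into gimple values, so
   the emitted GIMPLE is roughly

     i.0_1 = i;
     i = i.0_1 + 1;
     _2 = bar ();
     foo (i.0_1, _2);

   with the argument computations queued on PRE_P ahead of the call.  */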
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   The condition_uid is a discriminator tag for condition coverage used to map
   conditions to its corresponding full Boolean function.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus, unsigned condition_uid)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus,
			   condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus,
			   condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus, condition_uid),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus,
				      condition_uid));
      SET_EXPR_UID (expr, condition_uid);
    }
  else
    {
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
      SET_EXPR_UID (expr, condition_uid);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
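
/* Illustrative sketch (not part of the code above): applied to the predicate
   of

     if ((a && b) || c) goto yes; else goto no;

   the ORIF and ANDIF cases recurse and produce roughly

     if (a); else goto L;
     if (b) goto yes; else goto L;
     L:
     if (c) goto yes; else goto no;

   where L is the local_label created because the inner ANDIF was given no
   true label of its own.  All of the emitted COND_EXPRs carry the same
   condition_uid.  */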
/* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
   any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
   statement, if it is the last one.  Otherwise, return NULL.  */

static tree
find_goto (tree expr)
{
  if (!expr)
    return NULL_TREE;

  if (TREE_CODE (expr) == GOTO_EXPR)
    return expr;

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return NULL_TREE;

  tree_stmt_iterator i = tsi_start (expr);

  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    tsi_next (&i);

  if (!tsi_one_before_end_p (i))
    return NULL_TREE;

  return find_goto (tsi_stmt (i));
}

/* Same as find_goto, except that it returns NULL if the destination
   is not a LABEL_DECL.  */

static inline tree
find_goto_label (tree expr)
{
  tree dest = find_goto (expr);
  if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
    return dest;
  return NULL_TREE;
}
/* Given a multi-term condition (ANDIF, ORIF), walk the predicate PRED and tag
   every basic condition with CONDITION_UID.  Two basic conditions share the
   CONDITION_UID discriminator when they belong to the same predicate, which is
   used by the condition coverage.  Doing this as an explicit step makes for a
   simpler implementation than weaving it into the splitting code as the
   splitting code eventually calls the entry point gimplify_expr which makes
   bookkeeping complicated.  */
static void
tag_shortcut_cond (tree pred, unsigned condition_uid)
{
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR
      || TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      tree fst = TREE_OPERAND (pred, 0);
      tree lst = TREE_OPERAND (pred, 1);

      if (TREE_CODE (fst) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (fst) == TRUTH_ORIF_EXPR)
	tag_shortcut_cond (fst, condition_uid);
      else if (TREE_CODE (fst) == COND_EXPR)
	SET_EXPR_UID (fst, condition_uid);

      if (TREE_CODE (lst) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (lst) == TRUTH_ORIF_EXPR)
	tag_shortcut_cond (lst, condition_uid);
      else if (TREE_CODE (lst) == COND_EXPR)
	SET_EXPR_UID (lst, condition_uid);
    }
}
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  CONDITION_UID
   is the tag/discriminator for this EXPR - all basic conditions in the
   expression will be given the same CONDITION_UID.  */

static tree
shortcut_cond_expr (tree expr, unsigned condition_uid)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  tag_shortcut_cond (pred, condition_uid);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  then_ = shortcut_cond_expr (expr, condition_uid);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  else_ = shortcut_cond_expr (expr, condition_uid);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* The expr tree should also have the expression id set.  */
  SET_EXPR_UID (expr, condition_uid);

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location), condition_uid);

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location),
			  condition_uid);

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
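
/* Illustrative sketch (not part of the code above): a statement like

     if (a || b) f (); else g ();

   is rewritten, with the help of shortcut_cond_r, into approximately

     if (a) goto yes;
     if (b) goto yes; else goto no;
     yes: f (); goto end;
     no:  g ();
     end:

   before being gimplified again.  When an arm is already a bare goto, its
   label is hijacked instead of introducing a jump to a jump.  */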
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	case annot_expr_maybe_infinite_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
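
/* Illustrative sketch (added commentary): in C, 'a && b' is an int-typed
   TRUTH_ANDIF_EXPR; gimple_boolify retypes the node and, recursively, its
   operands to BOOLEAN_TYPE.  A plain scalar used as a condition, e.g.
   'if (x)' with int x, falls through to the fold_convert_loc at the end
   and becomes roughly '(_Bool) x'.  */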
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
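
/* Illustrative sketch (added commentary): with allow_rhs_cond_expr set and
   side-effect-free, non-trapping arms, an assignment such as

     x = a < b ? a : b;

   keeps its COND_EXPR form; only the condition and arms are reduced to
   gimple values, yielding roughly

     _1 = a < b;
     x = _1 ? a : b;

   rather than the branch-and-label lowering done by gimplify_cond_expr.  */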
/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}
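
/* Illustrative sketch (added commentary): 'a + 1' with int a is not
   considered trapping, while 'p->f + 1' is, because the recursive walk
   reaches the embedded dereference, for which tree_could_trap_p returns
   true.  */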
/* Associate the condition STMT with the discriminator UID.  STMTs that are
   broken down with ANDIF/ORIF from the same Boolean expression should be given
   the same UID; 'if (a && b && c) { if (d || e) ... } ...' should yield the
   map { a: 1, b: 1, c: 1, d: 2, e: 2 } when gimplification is done.  This is
   used for condition coverage.  */
static void
gimple_associate_condition_with_expr (struct function *fn, gcond *stmt,
				      unsigned uid)
{
  if (!condition_coverage_flag)
    return;

  if (!fn->cond_uids)
    fn->cond_uids = new hash_map <gcond*, unsigned> ();

  fn->cond_uids->put (stmt, uid);
}
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr, next_cond_uid ());

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
		       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  unsigned cond_uid = EXPR_COND_UID (expr);
  if (cond_uid == 0)
    cond_uid = next_cond_uid ();

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
  gimple_associate_condition_with_expr (cfun, cond_stmt, cond_uid);
  copy_warning (cond_stmt, COND_EXPR_COND (expr));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	{
	  /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
	     handling that label_cont == label_true can be only reached
	     through fallthrough from { code; }.  */
	  if (integer_zerop (COND_EXPR_COND (expr)))
	    UNUSED_LABEL_P (label_true) = 1;
	  label_cont = label_true;
	}
      else
	{
	  bool then_side_effects
	    = (TREE_OPERAND (expr, 1)
	       && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      /* For if (0) { non-side-effect-code } else { code }
		 tell -Wimplicit-fallthrough handling that label_cont can
		 be only reached through fallthrough from { code }.  */
	      if (integer_zerop (COND_EXPR_COND (expr)))
		{
		  UNUSED_LABEL_P (label_true) = 1;
		  if (!then_side_effects)
		    UNUSED_LABEL_P (label_cont) = 1;
		}

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
	 tell -Wimplicit-fallthrough handling that label_false can be only
	 reached through fallthrough from { code }.  */
      if (integer_nonzerop (COND_EXPR_COND (expr))
	  && (TREE_OPERAND (expr, 2) == NULL_TREE
	      || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
	UNUSED_LABEL_P (label_false) = 1;
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
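
/* Illustrative sketch (added commentary): a valued conditional of a
   non-addressable type, such as

     x = p ? f () : g ();

   takes the !VOID_TYPE_P path above: an 'iftmp' temporary is created, both
   arms become INIT_EXPRs of it, and the now-void COND_EXPR is pushed to the
   prequeue, so the final GIMPLE is roughly

     if (p) goto L1; else goto L2;
     L1: iftmp = f (); goto L3;
     L2: iftmp = g ();
     L3: x = iftmp;

   Exact labels and temporaries depend on context.  */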
/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);

  /* Do not allow an SSA name as the temporary.  */
  if (is_gimple_reg (*expr_p))
    *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);
  gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to)))
	      && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))));

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
  gimple_call_set_alloca_for_var (gs, true);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
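
/* Illustrative sketch (added commentary): this path is typically reached for
   variable-sized assignments between lvalues, where SIZE comes from a
   WITH_SIZE_EXPR, e.g. copying objects of a variably-modified type.  A
   store 'to = from' then becomes approximately

     __builtin_memcpy (&to, &from, size);

   with both addresses gimplified to gimple values first.  */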
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);
  gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to))));

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
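
/* Illustrative sketch (added commentary): the memset counterpart is used
   when the RHS is an empty CONSTRUCTOR, i.e. whole-object zero
   initialization; 'to = {}' becomes approximately

     __builtin_memset (&to, 0, size);  */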
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    {
      if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ERROR)
	gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
    }

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed out.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning an empty type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of empty type fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (!TREE_SIDE_EFFECTS (value)
	  && TREE_CODE (purpose) == FIELD_DECL
	  && is_empty_type (TREE_TYPE (purpose)))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else if (TREE_CODE (value) == RAW_DATA_CST)
	{
	  if (RAW_DATA_LENGTH (value) <= 32)
	    {
	      for (unsigned int i = 0; i < (unsigned) RAW_DATA_LENGTH (value);
		   ++i)
		if (!cleared || RAW_DATA_POINTER (value)[i])
		  {
		    tree p = purpose;
		    if (i)
		      p = fold_build2 (PLUS_EXPR, TREE_TYPE (purpose),
				       purpose,
				       build_int_cst (TREE_TYPE (purpose),
						      i));
		    tree cref = build4 (ARRAY_REF, array_elt_type,
					unshare_expr (object), p, NULL_TREE,
					NULL_TREE);
		    tree init
		      = build2 (INIT_EXPR, TREE_TYPE (cref), cref,
				build_int_cst (TREE_TYPE (value),
					       ((const unsigned char *)
						RAW_DATA_POINTER (value))[i]));
		    gimplify_and_add (init, pre_p);
		    ggc_free (init);
		  }
	    }
	  else
	    {
	      tree rtype = build_array_type_nelts (TREE_TYPE (value),
						   RAW_DATA_LENGTH (value));
	      tree rctor = build_constructor_single (rtype, bitsize_zero_node,
						     value);
	      tree addr = build_fold_addr_expr (cref);
	      cref = build2 (MEM_REF, rtype, addr,
			     build_int_cst (ptr_type_node, 0));
	      rctor = tree_output_constant_def (rctor);
	      if (gimplify_expr (&cref, pre_p, NULL, is_gimple_lvalue,
				 fb_lvalue) != GS_ERROR)
		gimplify_seq_add_stmt (pre_p,
				       gimple_build_assign (cref, rctor));
	    }
	}
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Return the initial guess for an appropriate RHS predicate for this LHS,
   before the LHS has been gimplified.  */

static gimple_predicate
initial_rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg_type (TREE_TYPE (lhs)))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
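
/* Illustrative sketch (added commentary): in an aggregate initializer such
   as

     struct S s = { .v = (struct V){ 1, 2 } };

   the embedded COMPOUND_LITERAL_EXPR is replaced, when neither the literal
   nor its decl is addressable, by the literal's own CONSTRUCTOR { 1, 2 },
   so gimplify_init_constructor can treat the whole initializer as one flat
   CONSTRUCTOR.  */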
5550 /* A subroutine of gimplify_modify_expr. Break out elements of a
5551 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5553 Note that we still need to clear any elements that don't have explicit
5554 initializers, so if not all elements are initialized we keep the
5555 original MODIFY_EXPR, we just remove all of the constructor elements.
5557 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5558 GS_ERROR if we would have to create a temporary when gimplifying
5559 this constructor. Otherwise, return GS_OK.
5561 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5563 static enum gimplify_status
5564 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5565 bool want_value
, bool notify_temp_creation
)
5567 tree object
, ctor
, type
;
5568 enum gimplify_status ret
;
5569 vec
<constructor_elt
, va_gc
> *elts
;
5570 bool cleared
= false;
5571 bool is_empty_ctor
= false;
5572 bool is_init_expr
= (TREE_CODE (*expr_p
) == INIT_EXPR
);
5574 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
5576 if (!notify_temp_creation
)
5578 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
5579 is_gimple_lvalue
, fb_lvalue
);
5580 if (ret
== GS_ERROR
)
5584 object
= TREE_OPERAND (*expr_p
, 0);
5585 ctor
= TREE_OPERAND (*expr_p
, 1)
5586 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
5587 type
= TREE_TYPE (ctor
);
5588 elts
= CONSTRUCTOR_ELTS (ctor
);
5591 switch (TREE_CODE (type
))
5595 case QUAL_UNION_TYPE
:
5598 /* Use readonly data for initializers of this or smaller size
5599 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5601 const HOST_WIDE_INT min_unique_size
= 64;
5602 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5603 is smaller than this, use readonly data. */
5604 const int unique_nonzero_ratio
= 8;
5605 /* True if a single access of the object must be ensured. This is the
5606 case if the target is volatile, the type is non-addressable and more
5607 than one field need to be assigned. */
5608 const bool ensure_single_access
5609 = TREE_THIS_VOLATILE (object
)
5610 && !TREE_ADDRESSABLE (type
)
5611 && vec_safe_length (elts
) > 1;
5612 struct gimplify_init_ctor_preeval_data preeval_data
;
5613 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
5614 HOST_WIDE_INT num_unique_nonzero_elements
;
5616 bool valid_const_initializer
;
5618 /* Aggregate types must lower constructors to initialization of
5619 individual elements. The exception is that a CONSTRUCTOR node
5620 with no elements indicates zero-initialization of the whole. */
5621 if (vec_safe_is_empty (elts
))
5623 if (notify_temp_creation
)
5626 /* The var will be initialized and so appear on lhs of
5627 assignment, it can't be TREE_READONLY anymore. */
5629 TREE_READONLY (object
) = 0;
5631 is_empty_ctor
= true;
5635 /* Fetch information about the constructor to direct later processing.
5636 We might want to make static versions of it in various cases, and
5637 can only do so if it known to be a valid constant initializer. */
5638 valid_const_initializer
5639 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
5640 &num_unique_nonzero_elements
,
5641 &num_ctor_elements
, &complete_p
);
5643 /* If a const aggregate variable is being initialized, then it
5644 should never be a lose to promote the variable to be static. */
5645 if (valid_const_initializer
5646 && num_nonzero_elements
> 1
5647 && TREE_READONLY (object
)
5649 && !DECL_REGISTER (object
)
5650 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
)
5651 || DECL_MERGEABLE (object
))
5652 /* For ctors that have many repeated nonzero elements
5653 represented through RANGE_EXPRs, prefer initializing
5654 those through runtime loops over copies of large amounts
5655 of data from readonly data section. */
5656 && (num_unique_nonzero_elements
5657 > num_nonzero_elements
/ unique_nonzero_ratio
5658 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
5659 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
5661 if (notify_temp_creation
)
5664 DECL_INITIAL (object
) = ctor
;
5665 TREE_STATIC (object
) = 1;
5666 if (!DECL_NAME (object
) || DECL_NAMELESS (object
))
5667 DECL_NAME (object
) = create_tmp_var_name ("C");
5668 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
5670 /* ??? C++ doesn't automatically append a .<number> to the
5671 assembler name, and even when it does, it looks at FE private
5672 data structures to figure out what that number should be,
5673 which are not set for this variable. I suppose this is
5674 important for local statics for inline functions, which aren't
5675 "local" in the object file sense. So in order to get a unique
5676 TU-local symbol, we must invoke the lhd version now. */
5677 lhd_set_decl_assembler_name (object
);
5679 *expr_p
= NULL_TREE
;
5683 /* The var will be initialized and so appear on lhs of
5684 assignment, it can't be TREE_READONLY anymore. */
5685 if (VAR_P (object
) && !notify_temp_creation
)
5686 TREE_READONLY (object
) = 0;
5688 /* If there are "lots" of initialized elements, even discounting
5689 those that are not address constants (and thus *must* be
5690 computed at runtime), then partition the constructor into
5691 constant and non-constant parts. Block copy the constant
5692 parts in, then generate code for the non-constant parts. */
5693 /* TODO. There's code in cp/typeck.cc to do this. */
5695 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
5696 /* store_constructor will ignore the clearing of variable-sized
5697 objects. Initializers for such objects must explicitly set
5698 every field that needs to be set. */
5700 else if (!complete_p
)
5701 /* If the constructor isn't complete, clear the whole object
5702 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5704 ??? This ought not to be needed. For any element not present
5705 in the initializer, we should simply set them to zero. Except
5706 we'd need to *find* the elements that are not present, and that
5707 requires trickery to avoid quadratic compile-time behavior in
5708 large cases or excessive memory use in small cases. */
5709 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
5710 else if (num_ctor_elements
- num_nonzero_elements
5711 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
5712 && num_nonzero_elements
< num_ctor_elements
/ 4)
5713 /* If there are "lots" of zeros, it's more efficient to clear
5714 the memory and then set the nonzero elements. */
5716 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5717 /* If a single access to the target must be ensured and all elements
5718 are zero, then it's optimal to clear whatever their number. */
5720 /* If the object is small enough to go in registers, and it's
5721 not required to be constructed in memory, clear it first.
5722 That will avoid wasting cycles preserving any padding bits
5723 that might be there, and if there aren't any, the compiler
5724 is smart enough to optimize the clearing out. */
5725 else if (complete_p
<= 0
5726 && !TREE_ADDRESSABLE (ctor
)
5727 && !TREE_THIS_VOLATILE (object
)
5728 && (TYPE_MODE (type
) != BLKmode
|| TYPE_NO_FORCE_BLK (type
))
5734 /* If there are "lots" of initialized elements, and all of them
5735 are valid address constants, then the entire initializer can
5736 be dropped to memory, and then memcpy'd out. Don't do this
5737 for sparse arrays, though, as it's more efficient to follow
5738 the standard CONSTRUCTOR behavior of memset followed by
5739 individual element initialization. Also don't do this for small
5740 all-zero initializers (which aren't big enough to merit
5741 clearing), and don't try to make bitwise copies of
5742 TREE_ADDRESSABLE types. */
5743 if (valid_const_initializer
5745 && !(cleared
|| num_nonzero_elements
== 0)
5746 && !TREE_ADDRESSABLE (type
))
5748 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5751 /* ??? We can still get unbounded array types, at least
5752 from the C++ front end. This seems wrong, but attempt
5753 to work around it for now. */
5756 size
= int_size_in_bytes (TREE_TYPE (object
));
5758 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5761 /* Find the maximum alignment we can assume for the object. */
5762 /* ??? Make use of DECL_OFFSET_ALIGN. */
5763 if (DECL_P (object
))
5764 align
= DECL_ALIGN (object
);
5766 align
= TYPE_ALIGN (type
);
5768 /* Do a block move either if the size is so small as to make
5769 each individual move a sub-unit move on average, or if it
5770 is so large as to make individual moves inefficient. */
5772 && num_nonzero_elements
> 1
5773 /* For ctors that have many repeated nonzero elements
5774 represented through RANGE_EXPRs, prefer initializing
5775 those through runtime loops over copies of large amounts
5776 of data from readonly data section. */
5777 && (num_unique_nonzero_elements
5778 > num_nonzero_elements
/ unique_nonzero_ratio
5779 || size
<= min_unique_size
)
5780 && (size
< num_nonzero_elements
5781 || !can_move_by_pieces (size
, align
)))
5783 if (notify_temp_creation
)
5786 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5787 ctor
= tree_output_constant_def (ctor
);
5788 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5789 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5790 TREE_OPERAND (*expr_p
, 1) = ctor
;
5792 /* This is no longer an assignment of a CONSTRUCTOR, but
5793 we still may have processing to do on the LHS. So
5794 pretend we didn't do anything here to let that happen. */
5795 return GS_UNHANDLED
;
      /* If a single access to the target must be ensured and there are
	 nonzero elements or the zero elements are not assigned en masse,
	 initialize the target from a temporary.  */
      if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
	{
	  if (notify_temp_creation)
	    return GS_ERROR;

	  tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	  TREE_OPERAND (*expr_p, 0) = temp;
	  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			    *expr_p,
			    build2 (MODIFY_EXPR, void_type_node,
				    object, temp));
	  return GS_OK;
	}

      if (notify_temp_creation)
	return GS_OK;

      /* If there are nonzero elements and if needed, pre-evaluate to capture
	 elements overlapping with the lhs into temporaries.  We must do this
	 before clearing to fetch the values before they are zeroed-out.  */
      if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	{
	  preeval_data.lhs_base_decl = get_base_address (object);
	  if (!DECL_P (preeval_data.lhs_base_decl))
	    preeval_data.lhs_base_decl = NULL;
	  preeval_data.lhs_alias_set = get_alias_set (object);

	  gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
				      pre_p, post_p, &preeval_data);
	}

      bool ctor_has_side_effects_p
	= TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

      if (cleared)
	{
	  /* Zap the CONSTRUCTOR element list, which simplifies this case.
	     Note that we still have to gimplify, in order to handle the
	     case of variable sized types.  Avoid shared tree structures.  */
	  CONSTRUCTOR_ELTS (ctor) = NULL;
	  TREE_SIDE_EFFECTS (ctor) = 0;
	  object = unshare_expr (object);
	  gimplify_stmt (expr_p, pre_p);
	}

      /* If we have not block cleared the object, or if there are nonzero
	 elements in the constructor, or if the constructor has side effects,
	 add assignments to the individual scalar fields of the object.  */
      if (!cleared
	  || num_nonzero_elements > 0
	  || ctor_has_side_effects_p)
	gimplify_init_ctor_eval (object, elts, pre_p, cleared);

      *expr_p = NULL_TREE;
      break;
    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p, post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;
    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	recompute_constructor_flags (ctor);

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }
	  }

	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;
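      /* Illustrative sketch (not part of the pass itself): for a vector
	 variable, say v4si declared as

	   typedef int v4si __attribute__ ((vector_size (16)));
	   v4si v = { 1, 2, 3, 4 };

	 the all-constant constructor is folded to a single VECTOR_CST
	 here, while one with a variable element, e.g. { a, 2, 3, 4 },
	 keeps the CONSTRUCTOR form and only has each element gimplified
	 to a GIMPLE value.  */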
    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p
      /* If the type is an empty type, we don't need to emit the
	 statement.  */
      && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }

  if (want_value)
    {
      *expr_p = object;
      ret = GS_OK;
    }
  else
    {
      *expr_p = NULL;
      ret = GS_ALL_DONE;
    }
  /* If the user requests to initialize automatic variables, we
     should initialize paddings inside the variable.  Add a call to
     __builtin_clear_padding (&object, 0, for_auto_init = true) to
     initialize paddings of object always to zero regardless of
     INIT_TYPE.  Note, we will not insert this call if the aggregate
     variable has been completely cleared already or it's initialized
     with an empty constructor.  We cannot insert this call if the
     variable is a gimple register since __builtin_clear_padding will take
     the address of the variable.  As a result, if a long double/_Complex long
     double variable will be spilled into stack later, its padding cannot
     be cleared with __builtin_clear_padding.  We should clear its padding
     when it is spilled into memory.  */
  if (is_init_expr
      && !is_gimple_reg (object)
      && clear_padding_type_may_have_padding_p (type)
      && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
	  || !AGGREGATE_TYPE_P (type))
      && is_var_need_auto_init (object))
    gimple_add_padding_init_for_auto_var (object, false, pre_p);

  return ret;
}
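/* Illustrative sketch (not part of the pass itself): with
   -ftrivial-auto-var-init=zero, an automatic variable like

     struct S { char c; int i; } s = { 1, 2 };   // padding bytes after 'c'

   receives, in addition to the element stores, a padding-clearing call
   along the lines of the comment above, roughly

     __builtin_clear_padding (&s, 0, 1);   // last argument: for_auto_init

   so the bytes between 'c' and 'i' are zeroed regardless of INIT_TYPE.  */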
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor and not volatile, do the direct assignment from
	     the constructor, but only if the target is not volatile either
	     since this latter assignment might end up being done on a per
	     field basis.  However, if the target is volatile and the type
	     is aggregate and non-addressable, gimplify_init_constructor
	     knows that it needs to ensure a single access to the target
	     and it will return GS_OK only in this case.  */
	  if (TREE_READONLY (*from_p)
	      && DECL_INITIAL (*from_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
	      && !TREE_THIS_VOLATILE (*from_p)
	      && (!TREE_THIS_VOLATILE (*to_p)
		  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
		      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true, false);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;

	case INDIRECT_REF:
	  if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
	    /* If we have code like

	       *(const A*)(A*)&x

	       where the type of "x" is a (possibly cv-qualified variant
	       of "A"), treat the entire expression as identical to "x".
	       This kind of code arises in C++ when an object is bound
	       to a const reference, and if "x" is a TARGET_EXPR we want
	       to take advantage of the optimization below.  But not if
	       the type is TREE_ADDRESSABLE; then C++17 says that the
	       TARGET_EXPR needs to be a temporary.  */
	    if (tree t
		= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
	      {
		bool volatile_p = TREE_THIS_VOLATILE (*from_p);
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	  break;

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;

	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.
	     Also avoid an extra temporary and copy when assigning to
	     a register.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p))
	      || (is_gimple_reg (*to_p) && !gimplify_ctxp->allow_rhs_cond_expr))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return want_value ? GS_OK : GS_ALL_DONE;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;
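	  /* Illustrative sketch (not part of the pass itself): for

	       struct big b = f ();   // f returns struct big in memory

	     marking the call with the return-slot optimization lets the
	     callee construct its result directly into 'b', shown in
	     GIMPLE dumps roughly as

	       b = f (); [return slot optimization]

	     instead of going through a compiler-generated temporary.  */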
	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case NOP_EXPR:
	  /* Pull out compound literal expressions from a NOP_EXPR.
	     Those are created in the C FE to drop qualifiers during
	     lvalue conversion.  */
	  if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
	      && tree_ssa_useless_type_conversion (*from_p))
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      ret = GS_OK;
	      changed = true;
	    }
	  break;

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  break;

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
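/* Illustrative sketch (not part of the pass itself): the
   COMPOUND_LITERAL_EXPR case above turns

     struct T x = (struct T) { 0, 1, 2 };

   into the equivalent

     struct T x = { 0, 1, 2 };

   provided neither the literal nor its backing decl is addressable,
   so no separate temporary object has to be materialized.  */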
/* Return true if T looks like a valid GIMPLE statement.  */

static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_SERIAL:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OACC_LOOP:
    case OMP_SCAN:
    case OMP_SCOPE:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_STRUCTURED_BLOCK:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
				   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  suppress_warning (other);
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
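/* Illustrative sketch (not part of the pass itself): a partial store
   such as

     __real__ c = x;     // c is a non-addressable _Complex double

   is promoted to a total store by loading the untouched part first:

     tmp = __imag__ c;
     c = COMPLEX_EXPR <x, tmp>;

   so 'c' can be treated as a register instead of living in memory.  */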
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  if (error_operand_p (*from_p) || error_operand_p (*to_p))
    return GS_ERROR;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Convert initialization from an empty variable-size CONSTRUCTOR to
     memset.  */
  if (TREE_TYPE (*from_p) != error_mark_node
      && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
      && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
      && TREE_CODE (*from_p) == CONSTRUCTOR
      && CONSTRUCTOR_NELTS (*from_p) == 0)
    {
      maybe_with_size_expr (from_p);
      gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
      return gimplify_modify_expr_to_memset (expr_p,
					     TREE_OPERAND (*from_p, 1),
					     want_value, pre_p);
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
      else if (is_gimple_addressable (from)
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
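/* Illustrative sketch (not part of the pass itself): gimplifying

     a = foo (b + 1);

   emits the argument computation into *PRE_P and builds the call
   tuple directly with 'a' as its LHS:

     _1 = b + 1;
     a = foo (_1);

   rather than a GIMPLE_ASSIGN whose right-hand side is a call.  */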
/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;

  return GS_OK;
}
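/* Illustrative sketch (not part of the pass itself): front ends such
   as Ada can produce equality tests between whole variable-sized
   objects a and b; since no scalar comparison of the aggregates is
   possible, the test is rewritten along the lines of

     memcmp (&a, &b, <size-of-a>) == 0

   with the size expression taken from a's type.  */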
/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  const location_t loc = EXPR_LOCATION (*expr_p);
  const enum tree_code code = TREE_CODE (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree type = TREE_TYPE (op0);
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  /* We need to perform ordering comparisons in memory order like memcmp and,
     therefore, may need to byte-swap operands for little-endian targets.  */
  if (code != EQ_EXPR && code != NE_EXPR)
    {
      gcc_assert (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN);
      gcc_assert (TREE_CODE (scalar_type) == INTEGER_TYPE);
      tree fndecl;

      if (BYTES_BIG_ENDIAN)
	fndecl = NULL_TREE;
      else
	switch (int_size_in_bytes (scalar_type))
	  {
	  case 1:
	    fndecl = NULL_TREE;
	    break;
	  case 2:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP16);
	    break;
	  case 4:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP32);
	    break;
	  case 8:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP64);
	    break;
	  case 16:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP128);
	    break;
	  default:
	    gcc_unreachable ();
	  }

      if (fndecl)
	{
	  op0 = build_call_expr_loc (loc, fndecl, 1, op0);
	  op1 = build_call_expr_loc (loc, fndecl, 1, op1);
	}
    }

  *expr_p = fold_build2_loc (loc, code, TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
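/* Illustrative sketch (not part of the pass itself): an ordering
   comparison of two 4-byte records a and b (as Ada, for instance,
   can generate) becomes roughly

     VIEW_CONVERT_EXPR<unsigned int>(a) < VIEW_CONVERT_EXPR<unsigned int>(b)

   with __builtin_bswap32 applied to both sides on little-endian
   targets, so the result matches the memory-order answer memcmp
   would give.  */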
/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
   expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
	gimplify_compound_expr (sub_p, pre_p, false);
      else
	gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}
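/* Illustrative sketch (not part of the pass itself): gimplifying

     x = (f (), g (), y);

   emits f (); and g (); as statements into *PRE_P and rewrites the
   expression to its last operand, leaving just x = y;  */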
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  if (val && TREE_TYPE (val) == error_mark_node)
    return GS_ERROR;

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  val = NULL;
	}
      else
	/* The temporary may not be an SSA name as later abnormal and EH
	   control flow may invalidate use/def domination.  When in SSA
	   form then assume there are no such issues and SAVE_EXPRs only
	   appear via GENERIC foldings.  */
	val = get_initialized_tmp_var (val, pre_p, post_p,
				       gimple_in_ssa_p (cfun));

      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}
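/* Illustrative sketch (not part of the pass itself): when the same
   size expression is wrapped in a SAVE_EXPR, e.g. SAVE_EXPR <n + 1>
   used by two variable-length arrays, resolving it emits one temporary

     D.1234 = n + 1;

   into *PRE_P, and every subsequent occurrence of that SAVE_EXPR
   reuses D.1234 instead of re-evaluating n + 1.  */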
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (INDIRECT_REF_P (op0)
	  || (TREE_CODE (op0) == MEM_REF
	      && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      if (TREE_CODE (*expr_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.
	 Similarly for VLAs.  */
      tree outtype = TREE_TYPE (TREE_VALUE (link));
      if (outtype != error_mark_node
	  && (TREE_ADDRESSABLE (outtype)
	      || !COMPLETE_TYPE_P (outtype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tree orig = TREE_VALUE (link);
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  if (orig != error_mark_node)
	    error ("invalid lvalue in %<asm%> output %d", i);
	  ret = tret;
	}

      /* If the gimplified operand is a register we do not allow memory.  */
      if (allows_reg
	  && allows_mem
	  && (is_gimple_reg (TREE_VALUE (link))
	      || (handled_component_p (TREE_VALUE (link))
		  && is_gimple_reg (TREE_OPERAND (TREE_VALUE (link), 0)))))
	allows_mem = 0;

      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  const size_t len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
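	  /* Illustrative sketch (not part of the pass itself): an
	     in/out operand

	       asm ("..." : "+r" (x));

	     is split here into an output "=r" (x) plus a matching
	     input "0" (x), so the optimizers can treat the read and
	     the write of x as separate operands.  */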
	}
    }

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      tree intype = TREE_TYPE (TREE_VALUE (link));
      if (intype != error_mark_node
	  && (TREE_ADDRESSABLE (intype)
	      || !COMPLETE_TYPE_P (intype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (inputv != error_mark_node)
		error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			  "memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* asm is volatile if it was marked by the user as volatile or
	 there are no outputs or this is an asm goto.  */
      gimple_asm_set_volatile (stmt,
			       ASM_VOLATILE_P (expr)
			       || noutputs == 0
			       || labels);
      gimple_asm_set_basic (stmt, ASM_BASIC_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally;  this is
   only valid for clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f (A ()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimple_wce_set_cleanup_eh_only (wce, eh_only);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  suppress_warning (var, OPT_Wuninitialized);
	}
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      gimple_seq init_pre_p = NULL;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
	  /* FIXME: this is correct only when the size of the type does
	     not depend on expressions evaluated in init.  */
	  gimplify_vla_decl (temp, &init_pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			     fb_none);
      else
	{
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			       fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}

      if (init)
	gimplify_and_add (init, &init_pre_p);

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  But only if we did not promote the
	 temporary to static storage.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && !TREE_STATIC (temp)
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp),
					    CLOBBER_STORAGE_END);
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}

      gimple_seq_add_seq (pre_p, init_pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}
/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
   to CTX.  If entries already exist, force them to be some flavor of private.
   If there is no enclosing parallel, do nothing.  */

static void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_TASKGROUP
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
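/* Illustrative sketch (not part of the pass itself): in

     void f (int n)
     {
     #pragma omp parallel
       {
	 char buf[n];   // TYPE_SIZE of buf's type refers to 'n'
       }
     }

   the size expression referencing 'n' causes 'n' to be entered as
   firstprivate in the enclosing parallel (or as a to-only map on a
   target region), so each thread or device sees the right bound.  */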
/* Similarly for each of the type sizes of TYPE.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
          || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
         sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
         FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
         reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
                  || ((nflags & GOVD_DATA_SHARE_CLASS)
                      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
                  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && !poly_int_tree_p (DECL_SIZE (decl)))
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
         replacement is private, else FIRSTPRIVATE since we'll need the
         address of the original variable either for SHARED, or for the
         copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
        {
          if (flags & GOVD_MAP)
            nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
          else if (flags & GOVD_PRIVATE)
            nflags = GOVD_PRIVATE;
          else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
                    && (flags & GOVD_FIRSTPRIVATE))
                   || (ctx->region_type == ORT_TARGET_DATA
                       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
            nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
          else
            nflags = GOVD_FIRSTPRIVATE;
          nflags |= flags & GOVD_SEEN;
          t = DECL_VALUE_EXPR (decl);
          gcc_assert (INDIRECT_REF_P (t));
          t = TREE_OPERAND (t, 0);
          gcc_assert (DECL_P (t));
          omp_add_variable (ctx, t, nflags);
        }

      /* Add all of the variable and type parameters (which should have
         been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
         of PRIVATE.  The sharing would take place via the pointer variable
         which we remapped above.  */
      if (flags & GOVD_SHARED)
        flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
                | (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
         alloca statement we generate for the variable, so make sure it
         is available.  This isn't automatically needed for the SHARED
         case, since we won't be allocating local storage then.
         For local variables TYPE_SIZE_UNIT might not be gimplified yet,
         in this case omp_notice_variable will be called later
         on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
               && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
        omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
           && omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
         size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
        {
          t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
          if (t && DECL_P (t))
            omp_notice_variable (ctx, t, true);
        }
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
        {
          n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
          if (n != NULL)
            {
              /* Ignore local variables and explicitly declared clauses.  */
              if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
                break;
              else if (outer_ctx->region_type == ORT_ACC_KERNELS)
                {
                  /* According to the OpenACC spec, such a reduction variable
                     should already have a copy map on a kernels construct,
                     verify that here.  */
                  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
                              && (n->value & GOVD_MAP));
                }
              else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
                {
                  /* Remove firstprivate and make it a copy map.  */
                  n->value &= ~GOVD_FIRSTPRIVATE;
                  n->value |= GOVD_MAP;
                }
            }
          else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
            {
              splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
                                 GOVD_MAP | GOVD_SEEN);
            }
          outer_ctx = outer_ctx->outer_context;
        }
    }
}
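
/* Illustrative example (not part of the original sources): mapping a
   variable-sized object on a target construct,

     #pragma omp target map(tofrom: vla)

   records the VLA itself as GOVD_MAP and additionally adds its pointer
   replacement variable (taken from DECL_VALUE_EXPR) with
   GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT, plus FIRSTPRIVATE entries
   for the gimplified size temporaries, as described above.  */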
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
                                   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
        || octx->order_concurrent)
      {
        n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
        if (n == NULL)
          {
            if (octx->order_concurrent)
              {
                error ("threadprivate variable %qE used in a region with"
                       " %<order(concurrent)%> clause", DECL_NAME (decl));
                inform (octx->location, "enclosing region");
              }
            else
              {
                error ("threadprivate variable %qE used in target region",
                       DECL_NAME (decl));
                inform (octx->location, "enclosing target region");
              }
            splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
            if (decl2)
              splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
          }
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
             DECL_NAME (decl));
      inform (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
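
/* Illustrative example (not part of the original sources): the diagnostics
   above fire for code like

     int t;
     #pragma omp threadprivate (t)
     ...
     #pragma omp task untied
     t++;   // "threadprivate variable 't' used in untied task"  */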
/* Return true if global var DECL is device resident.  */

static bool
device_resident_p (tree decl)
{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));

  if (!attr)
    return false;

  for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
    {
      tree c = TREE_VALUE (t);
      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
        return true;
    }

  return false;
}
/* Return true if DECL has an ACC DECLARE attribute.  */

static bool
is_oacc_declared (tree decl)
{
  tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
  return declared != NULL_TREE;
}
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
                    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (ctx->region_type & ORT_TASK)
    {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);

      /* The event-handle specified by a detach clause should always be firstprivate,
         regardless of the current default.  */
      if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
        kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
    }
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage duration
     declared in a namespace or global scope and referenced in construct
     must be explicitly specified, i.e. acts as default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
            || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
           && VAR_P (decl)
           && is_global_var (decl)
           && (DECL_FILE_SCOPE_P (decl)
               || (DECL_CONTEXT (decl)
                   && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
           && !lang_GNU_Fortran ())
    default_kind = OMP_CLAUSE_DEFAULT_NONE;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
        const char *rtype;

        if (ctx->region_type & ORT_PARALLEL)
          rtype = "parallel";
        else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
          rtype = "taskloop";
        else if (ctx->region_type & ORT_TASK)
          rtype = "task";
        else if (ctx->region_type & ORT_TEAMS)
          rtype = "teams";
        else
          gcc_unreachable ();

        error ("%qE not specified in enclosing %qs",
               DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
        inform (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
        {
          omp_notice_variable (octx, decl, in_code);
          for (; octx; octx = octx->outer_context)
            {
              splay_tree_node n2;

              n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
                  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
                continue;
              if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
                {
                  flags |= GOVD_FIRSTPRIVATE;
                  goto found_outer;
                }
              if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
                {
                  flags |= GOVD_SHARED;
                  goto found_outer;
                }
            }
        }

      if (TREE_CODE (decl) == PARM_DECL
          || (!is_global_var (decl)
              && DECL_CONTEXT (decl) == current_function_decl))
        flags |= GOVD_FIRSTPRIVATE;
      else
        flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
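
/* Illustrative example (not part of the original sources):

     int x = 1;
     #pragma omp parallel default(none)
     x++;   // error: 'x' not specified in enclosing 'parallel'

   whereas with default(firstprivate) the same reference would simply add
   GOVD_FIRSTPRIVATE to x's flags.  */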
/* Return string name for types of OpenACC constructs from ORT_* values.  */

static const char *
oacc_region_type_name (enum omp_region_type region_type)
{
  switch (region_type)
    {
    case ORT_ACC_DATA:
      return "data";
    case ORT_ACC_PARALLEL:
      return "parallel";
    case ORT_ACC_KERNELS:
      return "kernels";
    case ORT_ACC_SERIAL:
      return "serial";
    default:
      gcc_unreachable ();
    }
}
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  struct gimplify_omp_ctx *ctx_default = ctx;
  /* If no 'default' clause appears on this compute construct...  */
  if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
    {
      /* ..., see if one appears on a lexically containing 'data'
         construct.  */
      while ((ctx_default = ctx_default->outer_context))
        {
          if (ctx_default->region_type == ORT_ACC_DATA
              && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
            break;
        }
      /* If not, reset.  */
      if (!ctx_default)
        ctx_default = ctx;
    }

  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  if (omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only used variables in those blocks are
     transfered and remapped.  The block itself will have a private clause to
     avoid transfering the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's COMMON
     or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
     the variables in such a COMMON/EQUIVALENCE block shall be privatized not
     the whole block.  For C++ and Fortran, it can also be true under certain
     other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      if (is_private)
        flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
        {
          /* Aggregates default to 'present_or_copy', or 'present'.  */
          if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
            flags |= GOVD_MAP;
          else
            flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
        }
      else
        /* Scalars default to 'copy'.  */
        flags |= GOVD_MAP | GOVD_MAP_FORCE;
      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      if (is_private)
        flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
        flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
        {
          /* Aggregates default to 'present_or_copy', or 'present'.  */
          if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
            flags |= GOVD_MAP;
          else
            flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
        }
      else
        /* Scalars default to 'firstprivate'.  */
        flags |= GOVD_FIRSTPRIVATE;
      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
         about them.  */
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
             DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
             oacc_region_type_name (ctx->region_type));
      if (ctx_default != ctx)
        inform (ctx->location, "enclosing OpenACC %qs construct and",
                oacc_region_type_name (ctx->region_type));
      inform (ctx_default->location,
              "enclosing OpenACC %qs construct with %qs clause",
              oacc_region_type_name (ctx_default->region_type),
              "default(none)");
    }
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
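
/* Illustrative example (not part of the original sources): on

     #pragma acc parallel
     s += a[i];

   a scalar 's' not covered by any clause defaults to GOVD_FIRSTPRIVATE,
   while on '#pragma acc kernels' the same scalar defaults to a forced
   'copy' map (GOVD_MAP | GOVD_MAP_FORCE); aggregates default to
   'present_or_copy' (or 'present' under default(present)) on both.  */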
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (DECL_ARTIFICIAL (decl))
    {
      tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
      if (attr)
        decl = TREE_VALUE (TREE_VALUE (attr));
    }

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
        return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          if (ctx->region_type & ORT_ACC)
            /* For OpenACC, defer expansion of value to avoid transfering
               privatized common block data instead of im-/explicitly transfered
               variables which are in common blocks.  */
            ;
          else
            {
              tree value = get_base_address (DECL_VALUE_EXPR (decl));

              if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
                return omp_notice_threadprivate_variable (ctx, decl, value);
            }
        }

      if (gimplify_omp_ctxp->outer_context == NULL
          && VAR_P (decl)
          && oacc_get_fn_attrib (current_function_decl))
        {
          location_t loc = DECL_SOURCE_LOCATION (decl);

          if (lookup_attribute ("omp declare target link",
                                DECL_ATTRIBUTES (decl)))
            {
              error_at (loc,
                        "%qE with %<link%> clause used in %<routine%> function",
                        DECL_NAME (decl));
              return false;
            }
          else if (!lookup_attribute ("omp declare target",
                                      DECL_ATTRIBUTES (decl)))
            {
              error_at (loc,
                        "%qE requires a %<declare%> directive for use "
                        "in a %<routine%> function", DECL_NAME (decl));
              return false;
            }
        }
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      if (n == NULL)
        {
          unsigned nflags = flags;
          if ((ctx->region_type & ORT_ACC) == 0)
            {
              bool is_declare_target = false;
              if (is_global_var (decl)
                  && varpool_node::get_create (decl)->offloadable)
                {
                  struct gimplify_omp_ctx *octx;
                  for (octx = ctx->outer_context;
                       octx; octx = octx->outer_context)
                    {
                      n = splay_tree_lookup (octx->variables,
                                             (splay_tree_key)decl);
                      if (n
                          && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
                          && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
                        break;
                    }
                  is_declare_target = octx == NULL;
                }
              if (!is_declare_target)
                {
                  int gdmk;
                  enum omp_clause_defaultmap_kind kind;
                  if (lang_hooks.decls.omp_allocatable_p (decl))
                    gdmk = GDMK_ALLOCATABLE;
                  else if (lang_hooks.decls.omp_scalar_target_p (decl))
                    gdmk = GDMK_SCALAR_TARGET;
                  else if (lang_hooks.decls.omp_scalar_p (decl, false))
                    gdmk = GDMK_SCALAR;
                  else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
                           || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
                               && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
                                   == POINTER_TYPE)))
                    gdmk = GDMK_POINTER;
                  else
                    gdmk = GDMK_AGGREGATE;
                  kind = lang_hooks.decls.omp_predetermined_mapping (decl);
                  if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
                    {
                      if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
                        nflags |= GOVD_FIRSTPRIVATE;
                      else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
                        nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
                      else
                        gcc_unreachable ();
                    }
                  else if (ctx->defaultmap[gdmk] == 0)
                    {
                      tree d = lang_hooks.decls.omp_report_decl (decl);
                      error ("%qE not specified in enclosing %<target%>",
                             DECL_NAME (d));
                      inform (ctx->location, "enclosing %<target%>");
                    }
                  else if (ctx->defaultmap[gdmk]
                           & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
                    nflags |= ctx->defaultmap[gdmk];
                  else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
                    {
                      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
                      nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
                    }
                  else
                    {
                      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
                      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
                    }
                }
            }

          struct gimplify_omp_ctx *octx = ctx->outer_context;
          if ((ctx->region_type & ORT_ACC) && octx)
            {
              /* Look in outer OpenACC contexts, to see if there's a
                 data attribute for this variable.  */
              omp_notice_variable (octx, decl, in_code);

              for (; octx; octx = octx->outer_context)
                {
                  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
                    break;
                  splay_tree_node n2
                    = splay_tree_lookup (octx->variables,
                                         (splay_tree_key) decl);
                  if (n2)
                    {
                      if (octx->region_type == ORT_ACC_HOST_DATA)
                        error ("variable %qE declared in enclosing "
                               "%<host_data%> region", DECL_NAME (decl));
                      nflags |= GOVD_MAP;
                      if (octx->region_type == ORT_ACC_DATA
                          && (n2->value & GOVD_MAP_0LEN_ARRAY))
                        nflags |= GOVD_MAP_0LEN_ARRAY;
                      goto found_outer;
                    }
                }
            }

          if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
                          | GOVD_MAP_ALLOC_ONLY)) == flags)
            {
              tree type = TREE_TYPE (decl);

              if (gimplify_omp_ctxp->target_firstprivatize_array_bases
                  && omp_privatize_by_reference (decl))
                type = TREE_TYPE (type);
              if (!omp_mappable_type (type))
                {
                  error ("%qD referenced in target region does not have "
                         "a mappable type", decl);
                  nflags |= GOVD_MAP | GOVD_EXPLICIT;
                }
              else
                {
                  if ((ctx->region_type & ORT_ACC) != 0)
                    nflags = oacc_default_clause (ctx, decl, flags);
                  else
                    nflags |= GOVD_MAP;
                }
            }
        found_outer:
          omp_add_variable (ctx, decl, nflags);
          if (ctx->region_type & ORT_ACC)
            /* For OpenACC, as remarked above, defer expansion.  */
            shared = false;
          else
            shared = (nflags & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
          ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
        }
      else
        {
          if (ctx->region_type & ORT_ACC)
            /* For OpenACC, as remarked above, defer expansion.  */
            shared = false;
          else
            shared = ((n->value | flags)
                      & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
          ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
          /* If nothing changed, there's nothing left to do.  */
          if ((n->value & flags) == flags)
            return ret;
          flags |= n->value;
          n->value = flags;
        }
      goto do_outer;
    }

  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
          || ctx->region_type == ORT_TASKGROUP
          || ctx->region_type == ORT_SIMD
          || ctx->region_type == ORT_ACC
          || (ctx->region_type & ORT_TARGET_DATA) != 0)
        goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
          && lang_hooks.decls.omp_private_outer_ref (decl))
        flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions, those shouldn't be turned into simd arrays.  */
  if (ctx->region_type == ORT_SIMD
      && ctx->in_for_exprs
      && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
          == GOVD_PRIVATE))
    flags &= ~GOVD_SEEN;

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
        {
          splay_tree_node n2;
          tree t = DECL_VALUE_EXPR (decl);
          gcc_assert (INDIRECT_REF_P (t));
          t = TREE_OPERAND (t, 0);
          gcc_assert (DECL_P (t));
          n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
          n2->value |= GOVD_SEEN;
        }
      else if (omp_privatize_by_reference (decl)
               && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
               && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
                   != INTEGER_CST))
        {
          splay_tree_node n2;
          tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
          gcc_assert (DECL_P (t));
          n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
          if (n2)
            omp_notice_variable (ctx, t, true);
        }
    }

  if (ctx->region_type & ORT_ACC)
    /* For OpenACC, as remarked above, defer expansion.  */
    shared = false;
  else
    shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
                | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
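
/* Illustrative example (not part of the original sources): for

     #pragma omp target defaultmap(firstprivate: scalar)
     { use (s); use (p); }

   noticing the scalar 's' picks up ctx->defaultmap[GDMK_SCALAR]
   (GOVD_FIRSTPRIVATE here), while the pointer 'p' falls under
   GDMK_POINTER and by default gets a zero-length array section
   mapping (GOVD_MAP_0LEN_ARRAY).  */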
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
        {
          if (ctx == gimplify_omp_ctxp)
            {
              if (simd)
                error ("iteration variable %qE is predetermined linear",
                       DECL_NAME (decl));
              else
                error ("iteration variable %qE should be private",
                       DECL_NAME (decl));
              n->value = GOVD_PRIVATE;
              return true;
            }
          else
            return false;
        }
      else if ((n->value & GOVD_EXPLICIT) != 0
               && (ctx == gimplify_omp_ctxp
                   || (ctx->region_type == ORT_COMBINED_PARALLEL
                       && gimplify_omp_ctxp->outer_context == ctx)))
        {
          if ((n->value & GOVD_FIRSTPRIVATE) != 0)
            error ("iteration variable %qE should not be firstprivate",
                   DECL_NAME (decl));
          else if ((n->value & GOVD_REDUCTION) != 0)
            error ("iteration variable %qE should not be reduction",
                   DECL_NAME (decl));
          else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
            error ("iteration variable %qE should not be linear",
                   DECL_NAME (decl));
        }
      return (ctx == gimplify_omp_ctxp
              || (ctx->region_type == ORT_COMBINED_PARALLEL
                  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
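
/* Illustrative example (not part of the original sources): the checks above
   reject constructs such as

     #pragma omp parallel for firstprivate (i)
     for (i = 0; i < n; i++)
       ...   // "iteration variable 'i' should not be firstprivate"  */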
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
        {
          if (is_global_var (decl))
            return false;

          /* References might be private, but might be shared too,
             when checking for copyprivate, assume they might be
             private, otherwise assume they might be shared.  */
          if (copyprivate)
            return true;

          if (omp_privatize_by_reference (decl))
            return false;

          /* Treat C++ privatized non-static data members outside
             of the privatization the same.  */
          if (omp_member_access_dummy_var (decl))
            return false;

          return true;
        }

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
          && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
        {
          if ((ctx->region_type & ORT_TARGET_DATA) != 0
              || n == NULL
              || (n->value & GOVD_MAP) == 0)
            continue;
          return false;
        }

      if (n != NULL)
        {
          if ((n->value & GOVD_LOCAL) != 0
              && omp_member_access_dummy_var (decl))
            return false;
          return (n->value & GOVD_SHARED) == 0;
        }

      if (ctx->region_type == ORT_WORKSHARE
          || ctx->region_type == ORT_TASKGROUP
          || ctx->region_type == ORT_SIMD
          || ctx->region_type == ORT_ACC)
        continue;

      return false;
    }
  while (1);
}
/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}
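
/* Illustrative usage sketch (not part of the original sources):

     tree found = walk_tree (&body, find_decl_expr, (void *) decl, NULL);

   returns the DECL_EXPR for DECL if one appears in BODY; the callback
   returns non-NULL to stop the walk, and prunes subtrees at type or
   decl nodes.  */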
/* Gimplify the affinity clause but effectively ignore it.
   Generate:
     var = begin;
     if ((step > 1) ? var <= end : var > end)
       locatator_var_expr;  */

static void
gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
{
  tree last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree label = NULL_TREE;
  tree *last_body = NULL;
  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
      {
        tree t = OMP_CLAUSE_DECL (c);
        if (TREE_CODE (t) == TREE_LIST
            && TREE_PURPOSE (t)
            && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
          {
            if (TREE_VALUE (t) == null_pointer_node)
              continue;
            if (TREE_PURPOSE (t) != last_iter)
              {
                if (last_bind)
                  {
                    append_to_statement_list (label, last_body);
                    gimplify_and_add (last_bind, pre_p);
                    last_bind = NULL_TREE;
                  }
                for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
                  {
                    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
                                       is_gimple_val, fb_rvalue) == GS_ERROR
                        || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
                                          is_gimple_val, fb_rvalue) == GS_ERROR
                        || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
                                          is_gimple_val, fb_rvalue) == GS_ERROR
                        || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
                                           is_gimple_val, fb_rvalue)
                            == GS_ERROR))
                      return;
                  }
                last_iter = TREE_PURPOSE (t);
                tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
                last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
                                    NULL, block);
                last_body = &BIND_EXPR_BODY (last_bind);
                tree cond = NULL_TREE;
                location_t loc = OMP_CLAUSE_LOCATION (c);
                for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
                  {
                    tree var = TREE_VEC_ELT (it, 0);
                    tree begin = TREE_VEC_ELT (it, 1);
                    tree end = TREE_VEC_ELT (it, 2);
                    tree step = TREE_VEC_ELT (it, 3);
                    loc = DECL_SOURCE_LOCATION (var);
                    tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
                                           var, begin);
                    append_to_statement_list_force (tem, last_body);

                    tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
                                                  step, build_zero_cst (TREE_TYPE (step)));
                    tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
                                                  var, end);
                    tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
                                                  var, end);
                    cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
                                             cond1, cond2, cond3);
                    if (cond)
                      cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
                                              boolean_type_node, cond, cond1);
                    else
                      cond = cond1;
                  }
                tree cont_label = create_artificial_label (loc);
                label = build1 (LABEL_EXPR, void_type_node, cont_label);
                tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
                                            void_node,
                                            build_and_jump (&cont_label));
                append_to_statement_list_force (tem, last_body);
              }
            if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
              {
                append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
                                          last_body);
                TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
              }
            if (error_operand_p (TREE_VALUE (t)))
              return;
            append_to_statement_list_force (TREE_VALUE (t), last_body);
            TREE_VALUE (t) = null_pointer_node;
          }
        else
          {
            if (last_bind)
              {
                append_to_statement_list (label, last_body);
                gimplify_and_add (last_bind, pre_p);
                last_bind = NULL_TREE;
              }
            if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
              {
                gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
                               NULL, is_gimple_val, fb_rvalue);
                OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
              }
            if (error_operand_p (OMP_CLAUSE_DECL (c)))
              return;
            if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
                               is_gimple_lvalue, fb_lvalue) == GS_ERROR)
              return;
            gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
          }
      }
  if (last_bind)
    {
      append_to_statement_list (label, last_body);
      gimplify_and_add (last_bind, pre_p);
    }
  return;
}
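
/* Illustrative example (not part of the original sources): a clause such as

     #pragma omp task affinity (iterator (i = 0 : n) : a[i])

   is expanded by the function above into a loop that evaluates each a[i]
   locator expression once and otherwise discards it, guarded by the
   condition shown in the comment before the function.  */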
/* If *LIST_P contains any OpenMP depend clauses with iterators,
   lower all the depend clauses by populating corresponding depend
   array.  Returns 0 if there are no such depend clauses, or
   2 if all depend clauses should be removed, 1 otherwise.  */

static int
gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
{
  tree c;
  gimple *g;
  size_t n[5] = { 0, 0, 0, 0, 0 };
  bool unused[5];
  tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
  tree last_iter = NULL_TREE, last_count = NULL_TREE;
  size_t i, j;
  location_t first_loc = UNKNOWN_LOCATION;

  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
        switch (OMP_CLAUSE_DEPEND_KIND (c))
          {
          case OMP_CLAUSE_DEPEND_IN:
            i = 2;
            break;
          case OMP_CLAUSE_DEPEND_OUT:
          case OMP_CLAUSE_DEPEND_INOUT:
            i = 0;
            break;
          case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
            i = 1;
            break;
          case OMP_CLAUSE_DEPEND_DEPOBJ:
            i = 3;
            break;
          case OMP_CLAUSE_DEPEND_INOUTSET:
            i = 4;
            break;
          default:
            gcc_unreachable ();
          }
        tree t = OMP_CLAUSE_DECL (c);
        if (first_loc == UNKNOWN_LOCATION)
          first_loc = OMP_CLAUSE_LOCATION (c);
        if (TREE_CODE (t) == TREE_LIST
            && TREE_PURPOSE (t)
            && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
          {
            if (TREE_PURPOSE (t) != last_iter)
              {
                tree tcnt = size_one_node;
                for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
                  {
                    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
                                       is_gimple_val, fb_rvalue) == GS_ERROR
                        || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
                                          is_gimple_val, fb_rvalue) == GS_ERROR
                        || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
                                          is_gimple_val, fb_rvalue) == GS_ERROR
                        || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
                                           is_gimple_val, fb_rvalue)
                            == GS_ERROR))
                      return 2;
                    tree var = TREE_VEC_ELT (it, 0);
                    tree begin = TREE_VEC_ELT (it, 1);
                    tree end = TREE_VEC_ELT (it, 2);
                    tree step = TREE_VEC_ELT (it, 3);
                    tree orig_step = TREE_VEC_ELT (it, 4);
                    tree type = TREE_TYPE (var);
                    tree stype = TREE_TYPE (step);
                    location_t loc = DECL_SOURCE_LOCATION (var);
                    tree endmbegin;
                    /* Compute count for this iterator as
                       orig_step > 0
                       ? (begin < end ? (end - begin + (step - 1)) / step : 0)
                       : (begin > end ? (end - begin + (step + 1)) / step : 0)
                       and compute product of those for the entire depend
                       clause.  */
                    if (POINTER_TYPE_P (type))
                      endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
                                                   stype, end, begin);
                    else
                      endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
                                                   end, begin);
                    tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
                                                   step,
                                                   build_int_cst (stype, 1));
                    tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
                                                   build_int_cst (stype, 1));
                    tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
                                                unshare_expr (endmbegin),
                                                stepm1);
                    pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
                                           pos, step);
                    tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
                                                endmbegin, stepp1);
                    if (TYPE_UNSIGNED (stype))
                      {
                        neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
                        step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
                      }
                    neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
                                           neg, step);
                    tree cond = fold_build2_loc (loc, LT_EXPR,
                                                 boolean_type_node,
                                                 begin, end);
                    pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
                                           build_int_cst (stype, 0));
                    cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
                                            end, begin);
                    neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
                                           build_int_cst (stype, 0));
                    tree osteptype = TREE_TYPE (orig_step);
                    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
                                            orig_step,
                                            build_int_cst (osteptype, 0));
                    tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
                                                cond, pos, neg);
                    cnt = fold_convert_loc (loc, sizetype, cnt);
                    if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
                                       fb_rvalue) == GS_ERROR)
                      return 2;
                    tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
                  }
                if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
                                   fb_rvalue) == GS_ERROR)
                  return 2;
                last_iter = TREE_PURPOSE (t);
                last_count = tcnt;
              }
            if (counts[i] == NULL_TREE)
              counts[i] = last_count;
            else
              counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
                                          PLUS_EXPR, counts[i], last_count);
          }
        else
          n[i]++;
      }
  for (i = 0; i < 5; i++)
    if (counts[i])
      break;
  if (i == 5)
    return 0;

  tree total = size_zero_node;
  for (i = 0; i < 5; i++)
    {
      unused[i] = counts[i] == NULL_TREE && n[i] == 0;
      if (counts[i] == NULL_TREE)
        counts[i] = size_zero_node;
      if (n[i])
        counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
      if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
                         fb_rvalue) == GS_ERROR)
        return 2;
      total = size_binop (PLUS_EXPR, total, counts[i]);
    }

  if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return 2;
  bool is_old = unused[1] && unused[3] && unused[4];
  tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
                             size_int (is_old ? 1 : 4));
  if (!unused[4])
    totalpx = size_binop (PLUS_EXPR, totalpx,
                          size_binop (MULT_EXPR, counts[4], size_int (2)));
  tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
  tree array = create_tmp_var_raw (type);
  TREE_ADDRESSABLE (array) = 1;
  if (!poly_int_tree_p (totalpx))
    {
      if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
        gimplify_type_sizes (TREE_TYPE (array), pre_p);
      if (gimplify_omp_ctxp)
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          while (ctx
                 && (ctx->region_type == ORT_WORKSHARE
                     || ctx->region_type == ORT_TASKGROUP
                     || ctx->region_type == ORT_SIMD
                     || ctx->region_type == ORT_ACC))
            ctx = ctx->outer_context;
          if (ctx)
            omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
        }
      gimplify_vla_decl (array, pre_p);
    }
  else
    gimple_add_tmp_var (array);
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
                   NULL_TREE);
  tree tem;
  if (!is_old)
    {
      tem = build2 (MODIFY_EXPR, void_type_node, r,
                    build_int_cst (ptr_type_node, 0));
      gimplify_and_add (tem, pre_p);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
                  NULL_TREE);
    }
  tem = build2 (MODIFY_EXPR, void_type_node, r,
                fold_convert (ptr_type_node, total));
  gimplify_and_add (tem, pre_p);
  for (i = 1; i < (is_old ? 2 : 4); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
                  NULL_TREE, NULL_TREE);
      tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
      gimplify_and_add (tem, pre_p);
    }

  tree cnts[6];
  for (j = 5; j; j--)
    if (!unused[j - 1])
      break;
  for (i = 0; i < 5; i++)
    {
      if (i && (i >= j || unused[i - 1]))
        {
          cnts[i] = cnts[i - 1];
          continue;
        }
      cnts[i] = create_tmp_var (sizetype);
      if (i == 0)
        g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
      else
        {
          tree t;
          if (is_old)
            t = size_binop (PLUS_EXPR, counts[0], size_int (2));
          else
            t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
          if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
              == GS_ERROR)
            return 2;
          g = gimple_build_assign (cnts[i], t);
        }
      gimple_seq_add_stmt (pre_p, g);
    }
  if (unused[4])
    cnts[5] = NULL_TREE;
  else
    {
      tree t = size_binop (PLUS_EXPR, total, size_int (5));
      cnts[5] = create_tmp_var (sizetype);
      g = gimple_build_assign (cnts[i], t);
      gimple_seq_add_stmt (pre_p, g);
    }

  last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree *last_body = NULL;
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
        switch (OMP_CLAUSE_DEPEND_KIND (c))
          {
          case OMP_CLAUSE_DEPEND_IN:
            i = 2;
            break;
          case OMP_CLAUSE_DEPEND_OUT:
          case OMP_CLAUSE_DEPEND_INOUT:
            i = 0;
            break;
          case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
            i = 1;
            break;
          case OMP_CLAUSE_DEPEND_DEPOBJ:
            i = 3;
            break;
          case OMP_CLAUSE_DEPEND_INOUTSET:
            i = 4;
            break;
          default:
            gcc_unreachable ();
          }
        tree t = OMP_CLAUSE_DECL (c);
        if (TREE_CODE (t) == TREE_LIST
            && TREE_PURPOSE (t)
            && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
          {
            if (TREE_PURPOSE (t) != last_iter)
              {
                if (last_bind)
                  gimplify_and_add (last_bind, pre_p);
                tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
                last_bind = build3 (BIND_EXPR, void_type_node,
                                    BLOCK_VARS (block), NULL, block);
                TREE_SIDE_EFFECTS (last_bind) = 1;
                SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
                tree *p = &BIND_EXPR_BODY (last_bind);
                for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
                  {
                    tree var = TREE_VEC_ELT (it, 0);
                    tree begin = TREE_VEC_ELT (it, 1);
                    tree end = TREE_VEC_ELT (it, 2);
                    tree step = TREE_VEC_ELT (it, 3);
                    tree orig_step = TREE_VEC_ELT (it, 4);
                    tree type = TREE_TYPE (var);
                    location_t loc = DECL_SOURCE_LOCATION (var);
                    /* Emit:
                       var = begin;
                       goto cond_label;
                       beg_label:
                       ...
                       var = var + step;
                       cond_label:
                       if (orig_step > 0) {
                         if (var < end) goto beg_label;
                       } else {
                         if (var > end) goto beg_label;
                       }
                       for each iterator, with inner iterators added to
                       the ... above.  */
                    tree beg_label = create_artificial_label (loc);
                    tree cond_label = NULL_TREE;
                    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
                                      var, begin);
                    append_to_statement_list_force (tem, p);
                    tem = build_and_jump (&cond_label);
                    append_to_statement_list_force (tem, p);
                    tem = build1 (LABEL_EXPR, void_type_node, beg_label);
                    append_to_statement_list (tem, p);
                    tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
                                        NULL_TREE, NULL_TREE);
                    TREE_SIDE_EFFECTS (bind) = 1;
                    SET_EXPR_LOCATION (bind, loc);
                    append_to_statement_list_force (bind, p);
                    if (POINTER_TYPE_P (type))
                      tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
                                        var, fold_convert_loc (loc, sizetype,
                                                               step));
                    else
                      tem = build2_loc (loc, PLUS_EXPR, type, var, step);
                    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
                                      var, tem);
                    append_to_statement_list_force (tem, p);
                    tem = build1 (LABEL_EXPR, void_type_node, cond_label);
                    append_to_statement_list (tem, p);
                    tree cond = fold_build2_loc (loc, LT_EXPR,
                                                 boolean_type_node,
                                                 var, end);
                    tree pos
                      = fold_build3_loc (loc, COND_EXPR, void_type_node,
                                         cond, build_and_jump (&beg_label),
                                         void_node);
                    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
                                            var, end);
                    tree neg
                      = fold_build3_loc (loc, COND_EXPR, void_type_node,
                                         cond, build_and_jump (&beg_label),
                                         void_node);
                    tree osteptype = TREE_TYPE (orig_step);
                    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
                                            orig_step,
                                            build_int_cst (osteptype, 0));
                    tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
                                           cond, pos, neg);
                    append_to_statement_list_force (tem, p);
                    p = &BIND_EXPR_BODY (bind);
                  }
                last_body = p;
              }
            last_iter = TREE_PURPOSE (t);
            if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
              {
                append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
                                                        0), last_body);
                TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
              }
            if (error_operand_p (TREE_VALUE (t)))
              return 2;
            if (TREE_VALUE (t) != null_pointer_node)
              TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
            if (i == 4)
              {
                r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
                            NULL_TREE, NULL_TREE);
                tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
                                  NULL_TREE, NULL_TREE);
                r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
                tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
                                  void_type_node, r, r2);
                append_to_statement_list_force (tem, last_body);
                tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
                                  void_type_node, cnts[i],
                                  size_binop (PLUS_EXPR, cnts[i],
                                              size_int (1)));
                append_to_statement_list_force (tem, last_body);
                i = 5;
              }
            r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
                        NULL_TREE, NULL_TREE);
            tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
                              void_type_node, r, TREE_VALUE (t));
            append_to_statement_list_force (tem, last_body);
            if (i == 5)
              {
                r = build4 (ARRAY_REF, ptr_type_node, array,
                            size_binop (PLUS_EXPR, cnts[i], size_int (1)),
                            NULL_TREE, NULL_TREE);
                tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
                tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
                                  void_type_node, r, tem);
                append_to_statement_list_force (tem, last_body);
              }
            tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
                              void_type_node, cnts[i],
                              size_binop (PLUS_EXPR, cnts[i],
                                          size_int (1 + (i == 5))));
            append_to_statement_list_force (tem, last_body);
            TREE_VALUE (t) = null_pointer_node;
          }
        else
          {
            if (last_bind)
              {
                gimplify_and_add (last_bind, pre_p);
                last_bind = NULL_TREE;
              }
            if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
              {
                gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
                               NULL, is_gimple_val, fb_rvalue);
                OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
              }
            if (error_operand_p (OMP_CLAUSE_DECL (c)))
              return 2;
            if (OMP_CLAUSE_DECL (c) != null_pointer_node)
              OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
            if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
                               is_gimple_val, fb_rvalue) == GS_ERROR)
              return 2;
            if (i == 4)
              {
                r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
                            NULL_TREE, NULL_TREE);
                tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
                                  NULL_TREE, NULL_TREE);
                r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
                tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
                gimplify_and_add (tem, pre_p);
                g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
                                                              cnts[i],
                                                              size_int (1)));
                gimple_seq_add_stmt (pre_p, g);
                i = 5;
              }
            r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
                        NULL_TREE, NULL_TREE);
            tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
            gimplify_and_add (tem, pre_p);
            if (i == 5)
              {
                r = build4 (ARRAY_REF, ptr_type_node, array,
                            size_binop (PLUS_EXPR, cnts[i], size_int (1)),
                            NULL_TREE, NULL_TREE);
                tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
                tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
                append_to_statement_list_force (tem, last_body);
                gimplify_and_add (tem, pre_p);
              }
            g = gimple_build_assign (cnts[i],
                                     size_binop (PLUS_EXPR, cnts[i],
                                                 size_int (1 + (i == 5))));
            gimple_seq_add_stmt (pre_p, g);
          }
      }
  if (last_bind)
    gimplify_and_add (last_bind, pre_p);
  tree cond = boolean_false_node;
  if (is_old)
    {
      if (!unused[0])
        cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
                           size_binop_loc (first_loc, PLUS_EXPR, counts[0],
                                           size_int (2)));
      if (!unused[2])
        cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
                           build2_loc (first_loc, NE_EXPR, boolean_type_node,
                                       cnts[2],
                                       size_binop_loc (first_loc, PLUS_EXPR,
                                                       totalpx,
                                                       size_int (1))));
    }
  else
    {
      tree prev = size_int (5);
      for (i = 0; i < 5; i++)
        {
          if (unused[i])
            continue;
          prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
          cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
                             build2_loc (first_loc, NE_EXPR, boolean_type_node,
                                         cnts[i], unshare_expr (prev)));
        }
    }
  tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
                    build_call_expr_loc (first_loc,
                                         builtin_decl_explicit (BUILT_IN_TRAP),
                                         0), void_node);
  gimplify_and_add (tem, pre_p);
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *list_p;
  *list_p = c;
  return 1;
}
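
/* Illustrative example (not part of the original sources): for

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   the function above allocates a depend array laid out roughly as
   { 0, total, out-count, mutexinoutset-count, in-count, addr-0, addr-1,
     ... } (or { total, out-count, addr-0, ... } in the "old" two-kind
   layout) and replaces the clauses with a single OMP_CLAUSE_DEPEND_LAST
   node pointing at that array.  */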
/* True if mapping node C maps, or unmaps, a (Fortran) array descriptor.  */

static bool
omp_map_clause_descriptor_p (tree c)
{
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
    return false;

  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
    return true;

  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE)
      && OMP_CLAUSE_RELEASE_DESCRIPTOR (c))
    return true;

  return false;
}
/* For a set of mappings describing an array section pointed to by a struct
   (or derived type, etc.) component, create an "alloc" or "release" node to
   insert into a list following a GOMP_MAP_STRUCT node.  For some types of
   mapping (e.g. Fortran arrays with descriptors), an additional mapping may
   be created that is inserted into the list of mapping nodes attached to the
   directive being processed -- not part of the sorted list of nodes after
   GOMP_MAP_STRUCT.

   CODE is the code of the directive being processed.  GRP_START and GRP_END
   are the first and last of two or three nodes representing this array section
   mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
   GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER).  EXTRA_NODE is
   filled with the additional node described above, if needed.

   This function does not add the new nodes to any lists itself.  It is the
   responsibility of the caller to do that.  */

static tree
build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
                             tree *extra_node)
{
  enum gomp_map_kind mkind
    = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;

  gcc_assert (grp_start != grp_end);

  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
  OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
  OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
  tree grp_mid = NULL_TREE;
  if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
    grp_mid = OMP_CLAUSE_CHAIN (grp_start);

  if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
    OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
  else
    OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);

  if (grp_mid
      && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
      && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER)
    {
      tree c3
        = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
      OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
      OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
      OMP_CLAUSE_CHAIN (c3) = NULL_TREE;

      *extra_node = c3;
    }
  else
    *extra_node = NULL_TREE;

  return c2;
}
/* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
   and set *BITPOSP and *POFFSETP to the bit offset of the access.
   If BASE_REF is non-NULL and the containing object is a reference, set
   *BASE_REF to that reference before dereferencing the object.
   If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
   has array type, else return NULL.  */

static tree
extract_base_bit_offset (tree base, poly_int64 *bitposp,
                         poly_offset_int *poffsetp,
                         bool *variable_offset)
{
  tree offset;
  poly_int64 bitsize, bitpos;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  poly_offset_int poffset;

  STRIP_NOPS (base);

  base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
                              &unsignedp, &reversep, &volatilep);

  STRIP_NOPS (base);

  if (offset && poly_int_tree_p (offset))
    {
      poffset = wi::to_poly_offset (offset);
      *variable_offset = false;
    }
  else
    {
      poffset = 0;
      *variable_offset = (offset != NULL_TREE);
    }

  if (maybe_ne (bitpos, 0))
    poffset += bits_to_bytes_round_down (bitpos);

  *bitposp = bitpos;
  *poffsetp = poffset;

  return base;
}
/* Used for topological sorting of mapping groups.  UNVISITED means we haven't
   started processing the group yet.  The TEMPORARY mark is used when we first
   encounter a group on a depth-first traversal, and the PERMANENT mark is used
   when we have processed all the group's children (i.e. all the base pointers
   referred to by the group's mapping nodes, recursively).  */

enum omp_tsort_mark {
  UNVISITED,
  TEMPORARY,
  PERMANENT
};
/* Hash for trees based on operand_equal_p.  Like tree_operand_hash
   but ignores side effects in the equality comparisons.  */

struct tree_operand_hash_no_se : tree_operand_hash
{
  static inline bool equal (const value_type &,
                            const compare_type &);
};

inline bool
tree_operand_hash_no_se::equal (const value_type &t1,
                                const compare_type &t2)
{
  return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
}
/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
   clause.  */

struct omp_mapping_group {
  tree *grp_start;
  tree grp_end;
  omp_tsort_mark mark;
  /* If we've removed the group but need to reindex, mark the group as
     deleted.  */
  bool deleted;
  /* The group points to an already-created "GOMP_MAP_STRUCT
     GOMP_MAP_ATTACH_DETACH" pair.  */
  bool reprocess_struct;
  /* The group should use "zero-length" allocations for pointers that are not
     mapped "to" on the same directive.  */
  bool fragile;
  struct omp_mapping_group *sibling;
  struct omp_mapping_group *next;
};

DEBUG_FUNCTION void
debug_mapping_group (omp_mapping_group *grp)
{
  tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
  OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
  debug_generic_expr (*grp->grp_start);
  OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
}
/* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
   isn't one.  */

static tree
omp_get_base_pointer (tree expr)
{
  while (TREE_CODE (expr) == ARRAY_REF
         || TREE_CODE (expr) == COMPONENT_REF)
    expr = TREE_OPERAND (expr, 0);

  if (INDIRECT_REF_P (expr)
      || (TREE_CODE (expr) == MEM_REF
          && integer_zerop (TREE_OPERAND (expr, 1))))
    {
      expr = TREE_OPERAND (expr, 0);
      while (TREE_CODE (expr) == COMPOUND_EXPR)
        expr = TREE_OPERAND (expr, 1);
      if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
        expr = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == SAVE_EXPR)
        expr = TREE_OPERAND (expr, 0);
      STRIP_NOPS (expr);
      return expr;
    }

  return NULL_TREE;
}
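
/* Illustrative example (not part of the original sources): for the
   expression 'ptr->arr[10]' (an ARRAY_REF over a COMPONENT_REF over an
   INDIRECT_REF of 'ptr'), the function above strips the array and
   component references, sees the indirection, and returns 'ptr' as the
   OpenMP base pointer.  */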
/* An attach or detach operation depends directly on the address being
   attached/detached.  Return that address, or none if there are no
   attachments/detachments.  */

static tree
omp_get_attachment (omp_mapping_group *grp)
{
  tree node = *grp->grp_start;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_PRESENT_ALLOC:
    case GOMP_MAP_PRESENT_FROM:
    case GOMP_MAP_PRESENT_TO:
    case GOMP_MAP_PRESENT_TOFROM:
    case GOMP_MAP_ALWAYS_PRESENT_FROM:
    case GOMP_MAP_ALWAYS_PRESENT_TO:
    case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
      if (node == grp->grp_end)
        return NULL_TREE;

      node = OMP_CLAUSE_CHAIN (node);
      if (node && omp_map_clause_descriptor_p (node))
        {
          gcc_assert (node != grp->grp_end);
          node = OMP_CLAUSE_CHAIN (node);
        }
      if (node)
        switch (OMP_CLAUSE_MAP_KIND (node))
          {
          case GOMP_MAP_POINTER:
          case GOMP_MAP_ALWAYS_POINTER:
          case GOMP_MAP_FIRSTPRIVATE_POINTER:
          case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
          case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
            return NULL_TREE;

          case GOMP_MAP_ATTACH_DETACH:
          case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
          case GOMP_MAP_DETACH:
            return OMP_CLAUSE_DECL (node);

          default:
            internal_error ("unexpected mapping node");
          }
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
          || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
        return OMP_CLAUSE_DECL (node);
      else
        internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
        return OMP_CLAUSE_DECL (*grp->grp_start);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
          || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
        return OMP_CLAUSE_DECL (*grp->grp_start);
      else
        internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_IF_PRESENT:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    default:
      internal_error ("unexpected mapping node");
    }

  return error_mark_node;
}
/* Given a pointer START_P to the start of a group of related (e.g. pointer)
   mappings, return the chain pointer to the end of that group in the list.  */

static tree *
omp_group_last (tree *start_p)
{
  tree c = *start_p, nc, *grp_last_p = start_p;

  gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

  nc = OMP_CLAUSE_CHAIN (c);

  if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
    return grp_last_p;

  switch (OMP_CLAUSE_MAP_KIND (c))
    {
    default:
      while (nc
             && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
             && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
                 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
                 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
                 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                 || (OMP_CLAUSE_MAP_KIND (nc)
                     == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
                 || (OMP_CLAUSE_MAP_KIND (nc)
                     == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
                 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH
                 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
                 || omp_map_clause_descriptor_p (nc)))
        {
          tree nc2 = OMP_CLAUSE_CHAIN (nc);
          if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH)
            {
              /* In the specific case we're doing "exit data" on an array
                 slice of a reference-to-pointer struct component, we will see
                 DETACH followed by ATTACH_DETACH here.  We want to treat that
                 as a single group.  In other cases DETACH might represent a
                 stand-alone "detach" clause, so we don't want to consider
                 that part of the group.  */
              if (nc2
                  && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH_DETACH)
                goto consume_two_nodes;
              else
                break;
            }
          if (nc2
              && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (nc)
                  == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
              && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
            {
            consume_two_nodes:
              grp_last_p = &OMP_CLAUSE_CHAIN (nc);
              c = nc2;
              nc = OMP_CLAUSE_CHAIN (nc2);
            }
          else
            {
              grp_last_p = &OMP_CLAUSE_CHAIN (c);
              c = nc;
              nc = nc2;
            }
        }
      break;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      /* This is a weird artifact of how directives are parsed: bare attach or
         detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
         FIRSTPRIVATE_REFERENCE node.  FIXME.  */
      if (nc
          && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
          && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
              || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
        grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_TO_PSET:
      if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
          && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
              || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
        grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
      {
        unsigned HOST_WIDE_INT num_mappings
          = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
        if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
            || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
            || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
          grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
        for (unsigned i = 0; i < num_mappings; i++)
          grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
      }
      break;
    }

  return grp_last_p;
}
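
/* Illustrative example (not part of the original sources): for a clause
   like 'map(to: p[0:n])' on a target construct, the clause list contains
   a GOMP_MAP_TO node followed by a GOMP_MAP_ATTACH_DETACH (or
   GOMP_MAP_FIRSTPRIVATE_POINTER) node for the base pointer 'p';
   omp_group_last, given the first node, returns the chain pointer ending
   at that trailing node so both are treated as one group.  */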
/* Walk through LIST_P, and return a list of groups of mappings found (e.g.
   OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
   associated GOMP_MAP_POINTER mappings).  Return a vector of omp_mapping_group
   if we have more than one such group, else return NULL.  */

static void
omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
                             tree gather_sentinel)
{
  for (tree *cp = list_p;
       *cp && *cp != gather_sentinel;
       cp = &OMP_CLAUSE_CHAIN (*cp))
    {
      if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
        continue;

      tree *grp_last_p = omp_group_last (cp);
      omp_mapping_group grp;

      grp.grp_start = cp;
      grp.grp_end = *grp_last_p;
      grp.mark = UNVISITED;
      grp.sibling = NULL;
      grp.deleted = false;
      grp.reprocess_struct = false;
      grp.fragile = false;
      grp.next = NULL;
      groups->safe_push (grp);

      cp = grp_last_p;
    }
}

static vec<omp_mapping_group> *
omp_gather_mapping_groups (tree *list_p)
{
  vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();

  omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);

  if (groups->length () > 0)
    return groups;
  else
    {
      delete groups;
      return NULL;
    }
}
9853 /* A pointer mapping group GRP may define a block of memory starting at some
9854 base address, and maybe also define a firstprivate pointer or firstprivate
9855 reference that points to that block. The return value is a node containing
9856 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9857 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9858 return the number of consecutive chained nodes in CHAINED. */
static tree
omp_group_base (omp_mapping_group *grp, unsigned int *chained,
		tree *firstprivate)
{
  tree node = *grp->grp_start;

  *firstprivate = NULL_TREE;
  *chained = 1;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_PRESENT_ALLOC:
    case GOMP_MAP_PRESENT_FROM:
    case GOMP_MAP_PRESENT_TO:
    case GOMP_MAP_PRESENT_TOFROM:
    case GOMP_MAP_ALWAYS_PRESENT_FROM:
    case GOMP_MAP_ALWAYS_PRESENT_TO:
    case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
    case GOMP_MAP_IF_PRESENT:
      if (node == grp->grp_end)
	return node;

      node = OMP_CLAUSE_CHAIN (node);
      if (!node)
	internal_error ("unexpected mapping node");
      if (omp_map_clause_descriptor_p (node))
	{
	  if (node == grp->grp_end)
	    return *grp->grp_start;
	  node = OMP_CLAUSE_CHAIN (node);
	}
      switch (OMP_CLAUSE_MAP_KIND (node))
	{
	case GOMP_MAP_POINTER:
	case GOMP_MAP_FIRSTPRIVATE_POINTER:
	case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	  *firstprivate = OMP_CLAUSE_DECL (node);
	  return *grp->grp_start;

	case GOMP_MAP_ALWAYS_POINTER:
	case GOMP_MAP_ATTACH_DETACH:
	case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	case GOMP_MAP_DETACH:
	  return *grp->grp_start;

	default:
	  internal_error ("unexpected mapping node");
	}
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return node;
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return *grp->grp_start;
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	/* We're mapping the base pointer itself in a bare attach or detach
	   node.  This is a side effect of how parsing works, and the mapping
	   will be removed anyway (at least for enter/exit data directives).
	   We should ignore the mapping here.  FIXME.  */
	return NULL_TREE;
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
      {
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
	node = OMP_CLAUSE_CHAIN (node);
	if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	  {
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    node = OMP_CLAUSE_CHAIN (node);
	  }
	else if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH_DETACH)
	  node = OMP_CLAUSE_CHAIN (node);
	*chained = num_mappings;
	return node;
      }

    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return *grp->grp_start;

    case GOMP_MAP_FIRSTPRIVATE_POINTER:
    case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_ALWAYS_POINTER:
    case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
      /* These shouldn't appear by themselves.  */
      if (!seen_error ())
	internal_error ("unexpected pointer mapping node");
      return error_mark_node;

    default:
      gcc_unreachable ();
    }

  return error_mark_node;
}
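/* For example (illustrative): given the two-node group

     GOMP_MAP_TO s.ptr[0:n]
     GOMP_MAP_FIRSTPRIVATE_POINTER s.ptr

   omp_group_base returns the GOMP_MAP_TO node (the mapped block of memory)
   and sets *FIRSTPRIVATE to "s.ptr", the pointer that will be privatized on
   the device.  */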
/* Given a vector of omp_mapping_groups, build a hash table so we can look up
   nodes by tree_operand_hash_no_se.  */
static void
omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
				     omp_mapping_group *> *grpmap,
			    vec<omp_mapping_group> *groups,
			    tree reindex_sentinel)
{
  omp_mapping_group *grp;
  unsigned int i;
  bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (reindexing && *grp->grp_start == reindex_sentinel)
	above_hwm = true;

      if (reindexing && !above_hwm)
	continue;

      if (grp->reprocess_struct)
	continue;

      tree fpp;
      unsigned int chained;
      tree node = omp_group_base (grp, &chained, &fpp);

      if (node == error_mark_node || (!node && !fpp))
	continue;

      for (unsigned j = 0;
	   node && j < chained;
	   node = OMP_CLAUSE_CHAIN (node), j++)
	{
	  tree decl = OMP_CLAUSE_DECL (node);
	  /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
	     meaning node-hash lookups don't work.  This is a workaround for
	     that, but ideally we should just create the INDIRECT_REF at
	     source instead.  FIXME.  */
	  if (TREE_CODE (decl) == MEM_REF
	      && integer_zerop (TREE_OPERAND (decl, 1)))
	    decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));

	  omp_mapping_group **prev = grpmap->get (decl);

	  if (prev && *prev == grp)
	    ;
	  else if (prev)
	    {
	      /* Mapping the same thing twice is normally diagnosed as an error,
		 but can happen under some circumstances, e.g. in pr99928-16.c,
		 under:

		   #pragma omp target simd reduction(+:a[:3]) \
			   map(always, tofrom: a[:6])

		 will result in two "a[0]" mappings (of different sizes).  */

	      grp->sibling = (*prev)->sibling;
	      (*prev)->sibling = grp;
	    }
	  else
	    grpmap->put (decl, grp);
	}

      if (!fpp)
	continue;

      omp_mapping_group **prev = grpmap->get (fpp);
      if (prev && *prev != grp)
	{
	  grp->sibling = (*prev)->sibling;
	  (*prev)->sibling = grp;
	}
      else
	grpmap->put (fpp, grp);
    }
}
static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
omp_index_mapping_groups (vec<omp_mapping_group> *groups)
{
  hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;

  omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);

  return grpmap;
}
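/* A sketch of how the two functions above are used together (the real call
   sites appear later in this file):

     vec<omp_mapping_group> *groups = omp_gather_mapping_groups (list_p);
     if (groups)
       {
	 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
	   = omp_index_mapping_groups (groups);
	 ...look up clause decls in GRPMAP...
	 delete grpmap;
	 delete groups;
       }
 */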
/* Rebuild group map from partially-processed clause list (during
   omp_build_struct_sibling_lists).  We have already processed nodes up until
   a high-water mark (HWM).  This is a bit tricky because the list is being
   reordered as it is scanned, but we know:

   1. The list after HWM has not been touched yet, so we can reindex it safely.

   2. The list before and including HWM has been altered, but remains
      well-formed throughout the sibling-list building operation.

   So, we can do the reindex operation in two parts, on the processed and
   then the unprocessed halves of the list.  */
static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
omp_reindex_mapping_groups (tree *list_p,
			    vec<omp_mapping_group> *groups,
			    vec<omp_mapping_group> *processed_groups,
			    tree sentinel)
{
  hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;

  processed_groups->truncate (0);

  omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
  omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
  if (sentinel)
    omp_index_mapping_groups_1 (grpmap, groups, sentinel);

  return grpmap;
}
/* Find the immediately-containing struct for a component ref (etc.)
   expression EXPR.  */
static tree
omp_containing_struct (tree expr)
{
  tree expr0 = expr;

  STRIP_NOPS (expr);

  /* Note: don't strip NOPs unless we're also stripping off array refs or a
     component ref.  */
  if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
    return expr0;

  while (TREE_CODE (expr) == ARRAY_REF)
    expr = TREE_OPERAND (expr, 0);

  if (TREE_CODE (expr) == COMPONENT_REF)
    expr = TREE_OPERAND (expr, 0);

  return expr;
}
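/* E.g. for "s.a[3]" this returns "s"; for a bare decl "v" with no enclosing
   COMPONENT_REF or ARRAY_REF the expression is returned unchanged, which
   callers use as the termination condition when walking outwards.  */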
/* Return TRUE if DECL describes a component that is part of a whole structure
   that is mapped elsewhere in GRPMAP.  *MAPPED_BY_GROUP is set to the group
   that maps that structure, if present.  */
static bool
omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
					  omp_mapping_group *> *grpmap,
				 tree decl,
				 omp_mapping_group **mapped_by_group)
{
  tree wsdecl = NULL_TREE;

  *mapped_by_group = NULL;

  while (true)
    {
      wsdecl = omp_containing_struct (decl);
      if (wsdecl == decl)
	break;
      omp_mapping_group **wholestruct = grpmap->get (wsdecl);
      if (!wholestruct
	  && TREE_CODE (wsdecl) == MEM_REF
	  && integer_zerop (TREE_OPERAND (wsdecl, 1)))
	{
	  tree deref = TREE_OPERAND (wsdecl, 0);
	  deref = build_fold_indirect_ref (deref);
	  wholestruct = grpmap->get (deref);
	}
      if (wholestruct)
	{
	  *mapped_by_group = *wholestruct;
	  return true;
	}
      decl = wsdecl;
    }

  return false;
}
/* Helper function for omp_tsort_mapping_groups.  Returns TRUE on success, or
   FALSE on error.  */
static bool
omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
			    vec<omp_mapping_group> *groups,
			    hash_map<tree_operand_hash_no_se,
				     omp_mapping_group *> *grpmap,
			    omp_mapping_group *grp)
{
  if (grp->mark == PERMANENT)
    return true;
  if (grp->mark == TEMPORARY)
    {
      fprintf (stderr, "when processing group:\n");
      debug_mapping_group (grp);
      internal_error ("base pointer cycle detected");
    }
  grp->mark = TEMPORARY;

  tree attaches_to = omp_get_attachment (grp);

  if (attaches_to)
    {
      omp_mapping_group **basep = grpmap->get (attaches_to);

      if (basep && *basep != grp)
	{
	  for (omp_mapping_group *w = *basep; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	}
    }

  tree decl = OMP_CLAUSE_DECL (*grp->grp_start);

  while (decl)
    {
      tree base = omp_get_base_pointer (decl);

      if (!base)
	break;

      omp_mapping_group **innerp = grpmap->get (base);
      omp_mapping_group *wholestruct;

      /* We should treat whole-structure mappings as if all (pointer, in this
	 case) members are mapped as individual list items.  Check if we have
	 such a whole-structure mapping, if we don't have an explicit reference
	 to the pointer member itself.  */
      if (!innerp
	  && TREE_CODE (base) == COMPONENT_REF
	  && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
	innerp = &wholestruct;

      if (innerp && *innerp != grp)
	{
	  for (omp_mapping_group *w = *innerp; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	  break;
	}

      decl = base;
    }

  grp->mark = PERMANENT;

  /* Emit grp to output list.  */

  **outlist = grp;
  *outlist = &grp->next;

  return true;
}
/* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
   before mappings that use those pointers.  This is an implementation of the
   depth-first search algorithm, described e.g. at:

     https://en.wikipedia.org/wiki/Topological_sorting
*/
static omp_mapping_group *
omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
			  hash_map<tree_operand_hash_no_se, omp_mapping_group *>
			    *grpmap,
			  bool enter_exit_data)
{
  omp_mapping_group *grp, *outlist = NULL, **cursor;
  unsigned int i;
  bool saw_runtime_implicit = false;

  cursor = &outlist;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT)
	{
	  if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
	    {
	      saw_runtime_implicit = true;
	      continue;
	    }
	  if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	    return NULL;
	}
    }

  if (!saw_runtime_implicit)
    return outlist;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT
	  && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
	{
	  /* Clear the flag for enter/exit data because it is currently
	     meaningless for those operations in libgomp.  */
	  if (enter_exit_data)
	    OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start) = 0;

	  if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	    return NULL;
	}
    }

  return outlist;
}
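/* For instance (illustrative), given clauses written in the "wrong" order:

     #pragma omp target map(p->field[0:n]) map(p)

   the group mapping "p" is a base pointer for the group mapping
   "p->field[0:n]", so the sorted output emits the "p" group first: each base
   pointer is then processed before any mapping that attaches to it.  */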
/* Split INLIST into three parts:

     - "present" alloc/to/from groups
     - other to/from groups
     - other alloc/release/delete groups

   These sub-lists are then concatenated together to form the final list.
   Each sub-list retains the order of the original list.
   Note that ATTACH nodes are later moved to the end of the list in
   gimplify_adjust_omp_clauses, for target regions.  */
static omp_mapping_group *
omp_segregate_mapping_groups (omp_mapping_group *inlist)
{
  omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
  omp_mapping_group *p_groups = NULL;
  omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
  omp_mapping_group **p_tail = &p_groups;

  for (omp_mapping_group *w = inlist; w;)
    {
      tree c = *w->grp_start;
      omp_mapping_group *next = w->next;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

      switch (OMP_CLAUSE_MAP_KIND (c))
	{
	case GOMP_MAP_ALLOC:
	case GOMP_MAP_RELEASE:
	case GOMP_MAP_DELETE:
	  *ard_tail = w;
	  w->next = NULL;
	  ard_tail = &w->next;
	  break;

	/* These map types are all semantically identical, so are moved into a
	   single group.  They will each be changed into GOMP_MAP_FORCE_PRESENT
	   in gimplify_adjust_omp_clauses.  */
	case GOMP_MAP_PRESENT_ALLOC:
	case GOMP_MAP_PRESENT_FROM:
	case GOMP_MAP_PRESENT_TO:
	case GOMP_MAP_PRESENT_TOFROM:
	  *p_tail = w;
	  w->next = NULL;
	  p_tail = &w->next;
	  break;

	default:
	  *tf_tail = w;
	  w->next = NULL;
	  tf_tail = &w->next;
	}

      w = next;
    }

  /* Now splice the lists together...  */
  *tf_tail = ard_groups;
  *p_tail = tf_groups;

  return p_groups;
}
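/* For example (illustrative), an input list in the source order

     from(x), present_to(y), release(z), to(w)

   is returned as

     present_to(y), from(x), to(w), release(z)

   i.e. "present" groups first, then other to/from groups, then
   alloc/release/delete groups, each sub-list keeping its original order.  */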
/* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
   those groups based on the output list of omp_tsort_mapping_groups --
   singly-linked, threaded through each element's NEXT pointer starting at
   HEAD.  Each list element appears exactly once in that linked list.

   Each element of GROUPS may correspond to one or several mapping nodes.
   Node groups are kept together, and in the reordered list, the positions of
   the original groups are reused for the positions of the reordered list.
   Hence if we have e.g.

     {to ptr ptr} firstprivate {tofrom ptr} ...
	  ^			     ^
	  first group	 non-"map"   second group

   and say the second group contains a base pointer for the first so must be
   moved before it, the resulting list will contain:

     {tofrom ptr} firstprivate {to ptr ptr} ...
	  ^ prev. second group	   ^ prev. first group
*/
static tree *
omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
			    omp_mapping_group *head,
			    tree *list_p)
{
  unsigned int i;
  omp_mapping_group *grp;
  unsigned numgroups = groups->length ();
  auto_vec<tree> old_heads (numgroups);
  auto_vec<tree *> old_headps (numgroups);
  auto_vec<tree> new_heads (numgroups);
  auto_vec<tree> old_succs (numgroups);
  bool map_at_start = (list_p == (*groups)[0].grp_start);

  tree *new_grp_tail = NULL;

  /* Stash the start & end nodes of each mapping group before we start
     modifying the list.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      old_headps.quick_push (grp->grp_start);
      old_heads.quick_push (*grp->grp_start);
      old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
    }

  /* And similarly, the heads of the groups in the order we want to rearrange
     the list into.  */
  for (omp_mapping_group *w = head; w; w = w->next)
    new_heads.quick_push (*w->grp_start);

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      gcc_assert (head);

      if (new_grp_tail && old_succs[i - 1] == old_heads[i])
	{
	  /* a {b c d} {e f g} h i j   (original)

	     a {k l m} {e f g} h i j   (inserted new group on last iter)

	     a {k l m} {n o p} h i j   (this time, chain last group to new one)
	   */
	  *new_grp_tail = new_heads[i];
	}
      else if (new_grp_tail)
	{
	  /* a {b c d} e {f g h} i j k   (original)

	     a {l m n} e {f g h} i j k   (gap after last iter's group)

	     a {l m n} e {o p q} h i j   (chain last group to old successor)
	   */
	  *new_grp_tail = old_succs[i - 1];
	  *old_headps[i] = new_heads[i];
	}
      else
	{
	  /* The first inserted group -- point to new group, and leave end
	     open.  */
	  *grp->grp_start = new_heads[i];
	}

      new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);

      head = head->next;
    }

  if (new_grp_tail)
    *new_grp_tail = old_succs[numgroups - 1];

  gcc_assert (!head);

  return map_at_start ? (*groups)[0].grp_start : list_p;
}
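/* Taken together, the machinery above is used roughly as follows (a sketch
   of the sequence at the call sites later in this file):

     vec<omp_mapping_group> *groups = omp_gather_mapping_groups (list_p);
     hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
       = omp_index_mapping_groups (groups);
     ...resolve clause dependencies, build struct sibling lists (below)...
     omp_mapping_group *head
       = omp_tsort_mapping_groups (groups, grpmap, enter_exit_data);
     head = omp_segregate_mapping_groups (head);
     list_p = omp_reorder_mapping_groups (groups, head, list_p);
 */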
/* DECL is supposed to have lastprivate semantics in the outer contexts
   of combined/composite constructs, starting with OCTX.
   Add needed lastprivate, shared or map clause if no data sharing or
   mapping clause are present.  IMPLICIT_P is true if it is an implicit
   clause (IV on simd), in which case the lastprivate will not be
   copied to some constructs.  */
static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
					       tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  for (; octx; octx = octx->outer_context)
    {
      if ((octx->region_type == ORT_COMBINED_PARALLEL
	   || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type & ORT_TASK) != 0
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (implicit_p
	  && octx->region_type == ORT_WORKSHARE
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && octx->outer_context
	  && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
	  && splay_tree_lookup (octx->outer_context->variables,
				(splay_tree_key) decl) == NULL)
	{
	  octx = octx->outer_context;
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && !omp_check_private (octx, decl, false))
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (octx->region_type == ORT_COMBINED_TARGET)
	{
	  splay_tree_node n = splay_tree_lookup (octx->variables,
						 (splay_tree_key) decl);
	  if (n == NULL)
	    {
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	  else if (!implicit_p
		   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
	    {
	      n->value &= ~(GOVD_FIRSTPRIVATE
			    | GOVD_FIRSTPRIVATE_IMPLICIT
			    | GOVD_EXPLICIT);
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	}
      break;
    }
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}
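/* For example (illustrative): given a combined construct such as

     #pragma omp target parallel for

   with a variable needing lastprivate semantics on the inner loop, the
   variable is added as "map" on the combined target context and "shared" on
   the combined parallel context, so every enclosing construct ends up with a
   consistent data-sharing or mapping clause.  */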
/* We might have indexed several groups for DECL, e.g. a "TO" mapping and also
   a "FIRSTPRIVATE" mapping.  Return the one that isn't firstprivate, etc.  */
static omp_mapping_group *
omp_get_nonfirstprivate_group (hash_map<tree_operand_hash_no_se,
					omp_mapping_group *> *grpmap,
			       tree decl, bool allow_deleted = false)
{
  omp_mapping_group **to_group_p = grpmap->get (decl);

  if (!to_group_p)
    return NULL;

  omp_mapping_group *to_group = *to_group_p;

  for (; to_group; to_group = to_group->sibling)
    {
      tree grp_end = to_group->grp_end;
      switch (OMP_CLAUSE_MAP_KIND (grp_end))
	{
	case GOMP_MAP_FIRSTPRIVATE_POINTER:
	case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  break;

	default:
	  if (allow_deleted || !to_group->deleted)
	    return to_group;
	}
    }

  return NULL;
}
/* Return TRUE if the directive (whose clauses are described by the hash table
   of mapping groups, GRPMAP) maps DECL explicitly.  If TO_SPECIFICALLY is
   true, only count TO mappings.  If ALLOW_DELETED is true, ignore the
   "deleted" flag for groups.  If CONTAINED_IN_STRUCT is true, also return
   TRUE if DECL is mapped as a member of a whole-struct mapping.  */
static bool
omp_directive_maps_explicitly (hash_map<tree_operand_hash_no_se,
					omp_mapping_group *> *grpmap,
			       tree decl, omp_mapping_group **base_group,
			       bool to_specifically, bool allow_deleted,
			       bool contained_in_struct)
{
  omp_mapping_group *decl_group
    = omp_get_nonfirstprivate_group (grpmap, decl, allow_deleted);

  *base_group = NULL;

  if (decl_group)
    {
      tree grp_first = *decl_group->grp_start;
      /* We might be called during omp_build_struct_sibling_lists, when
	 GOMP_MAP_STRUCT might have been inserted at the start of the group.
	 Skip over that, and also possibly the node after it.  */
      if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
	  || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
	{
	  grp_first = OMP_CLAUSE_CHAIN (grp_first);
	  if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
	      || (OMP_CLAUSE_MAP_KIND (grp_first)
		  == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_ATTACH_DETACH)
	    grp_first = OMP_CLAUSE_CHAIN (grp_first);
	}
      enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
      if (!to_specifically
	  || GOMP_MAP_COPY_TO_P (first_kind)
	  || first_kind == GOMP_MAP_ALLOC)
	{
	  *base_group = decl_group;
	  return true;
	}
    }

  if (contained_in_struct
      && omp_mapped_by_containing_struct (grpmap, decl, base_group))
    return true;

  return false;
}
/* If we have mappings INNER and OUTER, where INNER is a component access and
   OUTER is a mapping of the whole containing struct, check that the mappings
   are compatible.  We'll be deleting the inner mapping, so we need to make
   sure the outer mapping does (at least) the same transfers to/from the
   device as the inner mapping.  */
static bool
omp_check_mapping_compatibility (location_t loc,
				 omp_mapping_group *outer,
				 omp_mapping_group *inner)
{
  tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;

  gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
  gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);

  enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
  enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);

  if (outer_kind == inner_kind)
    return true;

  switch (outer_kind)
    {
    case GOMP_MAP_ALWAYS_TO:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO)
	return true;
      break;

    case GOMP_MAP_ALWAYS_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_RELEASE
	  || inner_kind == GOMP_MAP_FROM)
	return true;
      break;

    case GOMP_MAP_TO:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC)
	return true;
      break;

    case GOMP_MAP_FROM:
      if (inner_kind == GOMP_MAP_RELEASE
	  || inner_kind == GOMP_MAP_FORCE_PRESENT)
	return true;
      break;

    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_TOFROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO
	  || inner_kind == GOMP_MAP_FROM
	  || inner_kind == GOMP_MAP_TOFROM)
	return true;
      break;

    default:
      break;
    }

  error_at (loc, "data movement for component %qE is not compatible with "
	    "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
	    OMP_CLAUSE_DECL (first_outer));
  return false;
}
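/* E.g. an outer "map(tofrom: s)" is compatible with an inner
   "map(to: s.a)", since the whole-struct transfer subsumes the component's;
   but an outer "map(to: s)" with an inner "map(from: s.a)" is diagnosed,
   because dropping the inner mapping would lose the copy-out.  */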
/* This function handles several cases where clauses on a mapping directive
   can interact with each other.

   If we have a FIRSTPRIVATE_POINTER node and we're also mapping the pointer
   on the same directive, change the mapping of the first node to
   ATTACH_DETACH.  We should have detected that this will happen already in
   c-omp.cc:c_omp_adjust_map_clauses and marked the appropriate decl
   as addressable.  (If we didn't, bail out.)

   If we have a FIRSTPRIVATE_REFERENCE (for a reference to pointer) and we're
   mapping the base pointer also, we may need to change the mapping type to
   ATTACH_DETACH and synthesize an alloc node for the reference itself.

   If we have an ATTACH_DETACH node, this is an array section with a pointer
   base.  If we're mapping the base on the same directive too, we can drop its
   mapping.  However, if we have a reference to pointer, make other appropriate
   adjustments to the mapping nodes instead.

   If we have an ATTACH_DETACH node with a Fortran pointer-set (array
   descriptor) mapping for a derived-type component, and we're also mapping the
   whole of the derived-type variable on another clause, the pointer-set
   mapping is removed.

   If we have a component access but we're also mapping the whole of the
   containing struct, drop the former access.

   If the expression is a component access, and we're also mapping a base
   pointer used in that component access in the same expression, change the
   mapping type of the latter to ALLOC (ready for processing by
   omp_build_struct_sibling_lists).  */
static void
omp_resolve_clause_dependencies (enum tree_code code,
				 vec<omp_mapping_group> *groups,
				 hash_map<tree_operand_hash_no_se,
					  omp_mapping_group *> *grpmap)
{
  unsigned int i;
  omp_mapping_group *grp;
  bool repair_chain = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      switch (OMP_CLAUSE_MAP_KIND (grp_end))
	{
	case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  {
	    omp_mapping_group *to_group
	      = omp_get_nonfirstprivate_group (grpmap, decl);

	    if (!to_group || to_group == grp)
	      break;

	    tree grp_first = *to_group->grp_start;
	    enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);

	    if ((GOMP_MAP_COPY_TO_P (first_kind)
		 || first_kind == GOMP_MAP_ALLOC)
		&& (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
		    != GOMP_MAP_FIRSTPRIVATE_POINTER))
	      {
		gcc_assert (TREE_ADDRESSABLE (OMP_CLAUSE_DECL (grp_end)));
		OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
	      }
	  }
	  break;

	case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  {
	    tree ptr = build_fold_indirect_ref (decl);

	    omp_mapping_group *to_group
	      = omp_get_nonfirstprivate_group (grpmap, ptr);

	    if (!to_group || to_group == grp)
	      break;

	    tree grp_first = *to_group->grp_start;
	    enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);

	    if (GOMP_MAP_COPY_TO_P (first_kind)
		|| first_kind == GOMP_MAP_ALLOC)
	      {
		OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
		OMP_CLAUSE_DECL (grp_end) = ptr;
		if ((OMP_CLAUSE_CHAIN (*to_group->grp_start)
		     == to_group->grp_end)
		    && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  {
		    gcc_assert (TREE_ADDRESSABLE
				(OMP_CLAUSE_DECL (to_group->grp_end)));
		    OMP_CLAUSE_SET_MAP_KIND (to_group->grp_end,
					     GOMP_MAP_ATTACH_DETACH);

		    location_t loc = OMP_CLAUSE_LOCATION (to_group->grp_end);
		    tree alloc
		      = build_omp_clause (loc, OMP_CLAUSE_MAP);
		    OMP_CLAUSE_SET_MAP_KIND (alloc, GOMP_MAP_ALLOC);
		    tree tmp = build_fold_addr_expr (OMP_CLAUSE_DECL
						     (to_group->grp_end));
		    tree char_ptr_type = build_pointer_type (char_type_node);
		    OMP_CLAUSE_DECL (alloc)
		      = build2 (MEM_REF, char_type_node,
				tmp,
				build_int_cst (char_ptr_type, 0));
		    OMP_CLAUSE_SIZE (alloc) = TYPE_SIZE_UNIT (TREE_TYPE (tmp));

		    OMP_CLAUSE_CHAIN (alloc)
		      = OMP_CLAUSE_CHAIN (*to_group->grp_start);
		    OMP_CLAUSE_CHAIN (*to_group->grp_start) = alloc;
		  }
	      }
	  }
	  break;

	case GOMP_MAP_ATTACH_DETACH:
	case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	  {
	    tree base_ptr, referenced_ptr_node = NULL_TREE;

	    while (TREE_CODE (decl) == ARRAY_REF)
	      decl = TREE_OPERAND (decl, 0);

	    if (TREE_CODE (decl) == INDIRECT_REF)
	      decl = TREE_OPERAND (decl, 0);

	    /* Only component accesses.  */
	    if (DECL_P (decl))
	      break;

	    /* We want the pointer itself when checking if the base pointer is
	       mapped elsewhere in the same directive -- if we have a
	       reference to the pointer, don't use that.  */

	    if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
		&& TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	      {
		referenced_ptr_node = OMP_CLAUSE_CHAIN (*grp->grp_start);
		base_ptr = OMP_CLAUSE_DECL (referenced_ptr_node);
	      }
	    else
	      base_ptr = decl;

	    gomp_map_kind zlas_kind
	      = (code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
		? GOMP_MAP_DETACH : GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;

	    if (TREE_CODE (TREE_TYPE (base_ptr)) == POINTER_TYPE)
	      {
		/* If we map the base TO, and we're doing an attachment, we can
		   skip the TO mapping altogether and create an ALLOC mapping
		   instead, since the attachment will overwrite the device
		   pointer in that location immediately anyway.  Otherwise,
		   change our mapping to
		   GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION in case the
		   attachment target has not been copied to the device already
		   by some earlier directive.  */

		bool base_mapped_to = false;

		omp_mapping_group *base_group;

		if (omp_directive_maps_explicitly (grpmap, base_ptr,
						   &base_group, false, true,
						   false))
		  {
		    if (referenced_ptr_node)
		      {
			base_mapped_to = true;
			if ((OMP_CLAUSE_MAP_KIND (base_group->grp_end)
			     == GOMP_MAP_ATTACH_DETACH)
			    && (OMP_CLAUSE_CHAIN (*base_group->grp_start)
				== base_group->grp_end))
			  {
			    OMP_CLAUSE_CHAIN (*base_group->grp_start)
			      = OMP_CLAUSE_CHAIN (base_group->grp_end);
			    base_group->grp_end = *base_group->grp_start;
			    repair_chain = true;
			  }
		      }
		    else
		      {
			base_group->deleted = true;
			OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end) = 1;
		      }
		  }

		/* We're dealing with a reference to a pointer, and we are
		   attaching both the reference and the pointer.  We know the
		   reference itself is on the target, because we are going to
		   create an ALLOC node for it in accumulate_sibling_list.  The
		   pointer might be on the target already or it might not, but
		   if it isn't then it's not an error, so use
		   GOMP_MAP_ATTACH_ZLAS for it.  */
		if (!base_mapped_to && referenced_ptr_node)
		  OMP_CLAUSE_SET_MAP_KIND (referenced_ptr_node, zlas_kind);

		omp_mapping_group *struct_group;
		tree desc;
		if ((desc = OMP_CLAUSE_CHAIN (*grp->grp_start))
		    && omp_map_clause_descriptor_p (desc)
		    && omp_mapped_by_containing_struct (grpmap, decl,
							&struct_group))
		  /* If we have a pointer set but we're mapping (or unmapping)
		     the whole of the containing struct, we can remove the
		     pointer set mapping.  */
		  OMP_CLAUSE_CHAIN (*grp->grp_start) = OMP_CLAUSE_CHAIN (desc);
	      }
	    else if (TREE_CODE (TREE_TYPE (base_ptr)) == REFERENCE_TYPE
		     && (TREE_CODE (TREE_TYPE (TREE_TYPE (base_ptr)))
			 == ARRAY_TYPE)
		     && OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION
			  (*grp->grp_start))
	      OMP_CLAUSE_SET_MAP_KIND (grp->grp_end, zlas_kind);
	  }
	  break;

	case GOMP_MAP_ATTACH:
	  /* Ignore standalone attach here.  */
	  break;

	default:
	  {
	    omp_mapping_group *struct_group;
	    if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
		&& *grp->grp_start == grp_end)
	      {
		omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
						 struct_group, grp);
		/* Remove the whole of this mapping -- redundant.  */
		grp->deleted = true;
	      }

	    tree base = decl;
	    while ((base = omp_get_base_pointer (base)))
	      {
		omp_mapping_group *base_group;

		if (omp_directive_maps_explicitly (grpmap, base, &base_group,
						   true, true, false))
		  {
		    tree grp_first = *base_group->grp_start;
		    OMP_CLAUSE_SET_MAP_KIND (grp_first, GOMP_MAP_ALLOC);
		  }
	      }
	  }
	}
    }

  if (repair_chain)
    {
      /* Group start pointers may have become detached from the
	 OMP_CLAUSE_CHAIN of previous groups if elements were removed from the
	 end of those groups.  Fix that now.  */
      tree *new_next = NULL;
      FOR_EACH_VEC_ELT (*groups, i, grp)
	{
	  if (new_next)
	    grp->grp_start = new_next;

	  new_next = &OMP_CLAUSE_CHAIN (grp->grp_end);
	}
    }
}
/* Similar to omp_resolve_clause_dependencies, but for OpenACC.  The only
   clause dependencies we handle for now are struct element mappings and
   whole-struct mappings on the same directive, and duplicate clause
   detection.  */
static void
oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
				  hash_map<tree_operand_hash_no_se,
					   omp_mapping_group *> *grpmap)
{
  unsigned int i;
  omp_mapping_group *grp;
  hash_set<tree_operand_hash> *seen_components = NULL;
  hash_set<tree_operand_hash> *shown_error = NULL;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      if (DECL_P (grp_end))
	continue;

      tree c = OMP_CLAUSE_DECL (*grp->grp_start);
      while (TREE_CODE (c) == ARRAY_REF)
	c = TREE_OPERAND (c, 0);
      if (TREE_CODE (c) != COMPONENT_REF)
	continue;
      if (!seen_components)
	seen_components = new hash_set<tree_operand_hash> ();
      if (!shown_error)
	shown_error = new hash_set<tree_operand_hash> ();
      if (seen_components->contains (c)
	  && !shown_error->contains (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (grp_end),
		    "%qE appears more than once in map clauses",
		    OMP_CLAUSE_DECL (grp_end));
	  shown_error->add (c);
	}
      else
	seen_components->add (c);

      omp_mapping_group *struct_group;
      if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
	  && *grp->grp_start == grp_end)
	{
	  omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
					   struct_group, grp);
	  /* Remove the whole of this mapping -- redundant.  */
	  grp->deleted = true;
	}
    }

  if (seen_components)
    delete seen_components;
  if (shown_error)
    delete shown_error;
}
/* Link node NEWNODE so it is pointed to by chain INSERT_AT.  NEWNODE's chain
   is linked to the previous node pointed to by INSERT_AT.  */
static tree *
omp_siblist_insert_node_after (tree newnode, tree *insert_at)
{
  OMP_CLAUSE_CHAIN (newnode) = *insert_at;
  *insert_at = newnode;
  return &OMP_CLAUSE_CHAIN (newnode);
}
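/* I.e. if INSERT_AT is &OMP_CLAUSE_CHAIN (a) in the list "a -> b", inserting
   NEWNODE gives "a -> NEWNODE -> b", and the return value addresses
   NEWNODE's chain field so callers can continue appending after it.  */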
/* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
   pointed to by chain MOVE_AFTER instead.  */
static void
omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
{
  gcc_assert (node == *old_pos);
  *old_pos = OMP_CLAUSE_CHAIN (node);
  OMP_CLAUSE_CHAIN (node) = *move_after;
  *move_after = node;
}
/* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
   LAST_NODE to after MOVE_AFTER chain.  Similar to below function, but no
   new nodes are prepended to the list before splicing into the new position.
   Return the position we should continue scanning the list at, or NULL to
   stay where we were.  */
static tree *
omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
			      tree *move_after)
{
  if (first_ptr == move_after)
    return NULL;

  tree tmp = *first_ptr;
  *first_ptr = OMP_CLAUSE_CHAIN (last_node);
  OMP_CLAUSE_CHAIN (last_node) = *move_after;
  *move_after = tmp;

  return first_ptr;
}
/* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
   [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
   pointer MOVE_AFTER.

   The latter list was previously part of the OMP clause list, and the former
   (prepended) part is comprised of new nodes.

   We start with a list of nodes starting with a struct mapping node.  We
   rearrange the list so that new nodes starting from FIRST_NEW and whose last
   node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
   the group of mapping nodes we are currently processing (from the chain
   FIRST_PTR to LAST_NODE).  The return value is the pointer to the next chain
   we should continue processing from, or NULL to stay where we were.

   The transformation (in the case where MOVE_AFTER and FIRST_PTR are
   different) is worked through below.  Here we are processing LAST_NODE, and
   FIRST_PTR points at the preceding mapping clause:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->F (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

     *last_new_tail = *first_ptr;

  I. new_node (first_new)	[->F (last_new_tail)]

     *first_ptr = OMP_CLAUSE_CHAIN (last_node)

  #. mapping node		chain
  ----------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     OMP_CLAUSE_CHAIN (last_node) = *move_after;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     *move_after = first_new;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...
  I. new_node (first_new)	[->F (last_new_tail)]

   or, in order of chain traversal:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  I. new_node (first_new)	[->F (last_new_tail)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  H. ...
*/
static tree *
omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
				     tree *first_ptr, tree last_node,
				     tree *move_after)
{
  tree *continue_at = NULL;
  *last_new_tail = *first_ptr;
  if (first_ptr == move_after)
    *move_after = first_new;
  else
    {
      *first_ptr = OMP_CLAUSE_CHAIN (last_node);
      continue_at = first_ptr;
      OMP_CLAUSE_CHAIN (last_node) = *move_after;
      *move_after = first_new;
    }
  return continue_at;
}
static omp_addr_token *
omp_first_chained_access_token (vec<omp_addr_token *> &addr_tokens)
{
  using namespace omp_addr_tokenizer;
  int idx = addr_tokens.length () - 1;
  gcc_assert (idx >= 0);
  if (addr_tokens[idx]->type != ACCESS_METHOD)
    return addr_tokens[idx];
  while (idx > 0 && addr_tokens[idx - 1]->type == ACCESS_METHOD)
    idx--;
  return addr_tokens[idx];
}
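/* E.g. for an expression such as "s.p->q", the tokenizer ends the token
   vector with a run of consecutive ACCESS_METHOD tokens; this function
   returns the first token of that trailing run (or simply the last token,
   if it is not an ACCESS_METHOD token).  */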
/* Mapping struct members causes an additional set of nodes to be created,
   starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
   number of members being mapped, in order of ascending position (address or
   bit number).

   We scan through the list of mapping clauses, calling this function for each
   struct member mapping we find, and build up the list of mappings after the
   initial GOMP_MAP_STRUCT node.  For pointer members, these will be
   newly-created ALLOC nodes.  For non-pointer members, the existing mapping is
   moved into place in the sorted list.

     struct {
       int *a;
       int *b;
       int c;
       int *d;
     };

   #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
				    struct.d[0:n])

   GOMP_MAP_STRUCT (4)
   [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
   GOMP_MAP_ALLOC  (struct.a)
   GOMP_MAP_ALLOC  (struct.b)
   GOMP_MAP_TO     (struct.c)
   GOMP_MAP_ALLOC  (struct.d)
   ...

   In the case where we are mapping references to pointers, or in Fortran if
   we are mapping an array with a descriptor, additional nodes may be created
   after the struct node list also.

   The return code is either a pointer to the next node to process (if the
   list has been rearranged), else NULL to continue with the next node in the
   original list.  */
static tree *
omp_accumulate_sibling_list (enum omp_region_type region_type,
			     enum tree_code code,
			     hash_map<tree_operand_hash, tree>
			       *&struct_map_to_clause,
			     hash_map<tree_operand_hash_no_se,
				      omp_mapping_group *> *group_map,
			     tree *grp_start_p, tree grp_end,
			     vec<omp_addr_token *> &addr_tokens, tree **inner,
			     bool *fragile_p, bool reprocessing_struct,
			     tree **added_tail)
{
  using namespace omp_addr_tokenizer;
  poly_offset_int coffset;
  poly_int64 cbitpos;
  tree ocd = OMP_CLAUSE_DECL (grp_end);
  bool openmp = !(region_type & ORT_ACC);
  bool target = (region_type & ORT_TARGET) != 0;
  tree *continue_at = NULL;

  while (TREE_CODE (ocd) == ARRAY_REF)
    ocd = TREE_OPERAND (ocd, 0);

  if (*fragile_p)
    {
      omp_mapping_group *to_group
	= omp_get_nonfirstprivate_group (group_map, ocd, true);

      if (to_group)
	return NULL;
    }

  omp_addr_token *last_token = omp_first_chained_access_token (addr_tokens);
  if (last_token->type == ACCESS_METHOD)
    {
      switch (last_token->u.access_kind)
	{
	case ACCESS_REF:
	case ACCESS_REF_TO_POINTER:
	case ACCESS_REF_TO_POINTER_OFFSET:
	case ACCESS_INDEXED_REF_TO_ARRAY:
	  /* We may see either a bare reference or a dereferenced
	     "convert_from_reference"-like one here.  Handle either way.  */
	  if (TREE_CODE (ocd) == INDIRECT_REF)
	    ocd = TREE_OPERAND (ocd, 0);
	  gcc_assert (TREE_CODE (TREE_TYPE (ocd)) == REFERENCE_TYPE);
	  break;

	default:
	  break;
	}
    }

  bool variable_offset;
  tree base
    = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);

  int base_token;
  for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
    {
      if (addr_tokens[base_token]->type == ARRAY_BASE
	  || addr_tokens[base_token]->type == STRUCTURE_BASE)
	break;
    }

  /* The two expressions in the assertion below aren't quite the same: if we
     have 'struct_base_decl access_indexed_array' for something like
     "myvar[2].x" then base will be "myvar" and addr_tokens[base_token]->expr
     will be "myvar[2]" -- the actual base of the structure.
     The former interpretation leads to a strange situation where we get
       struct(myvar) alloc(myvar[2].ptr1)
     That is, the array of structures is kind of treated as one big structure
     for the purposes of gathering sibling lists, etc.  */
  /* gcc_assert (base == addr_tokens[base_token]->expr);  */

  bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
			 == GOMP_MAP_ATTACH_DETACH)
			|| (OMP_CLAUSE_MAP_KIND (grp_end)
			    == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
  bool has_descriptor = false;
  if (OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
    {
      tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
      if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
	has_descriptor = true;
    }

  if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
    {
      enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;

      if (struct_map_to_clause == NULL)
	struct_map_to_clause = new hash_map<tree_operand_hash, tree>;

      if (variable_offset)
	str_kind = GOMP_MAP_STRUCT_UNORD;

      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);

      OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
      OMP_CLAUSE_DECL (l) = unshare_expr (base);
      OMP_CLAUSE_SIZE (l) = size_int (1);

      struct_map_to_clause->put (base, l);

      /* On first iterating through the clause list, we insert the struct node
	 just before the component access node that triggers the initial
	 omp_accumulate_sibling_list call for a particular sibling list (and
	 it then forms the first entry in that list).  When reprocessing
	 struct bases that are themselves component accesses, we insert the
	 struct node on an off-side list to avoid inserting the new
	 GOMP_MAP_STRUCT into the middle of the old one.  */
      tree *insert_node_pos = reprocessing_struct ? *added_tail : grp_start_p;

      if (has_descriptor)
	{
	  tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
	  if (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
	    OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
	  tree sc = *insert_node_pos;
	  OMP_CLAUSE_CHAIN (l) = desc;
	  OMP_CLAUSE_CHAIN (*grp_start_p) = OMP_CLAUSE_CHAIN (desc);
	  OMP_CLAUSE_CHAIN (desc) = sc;
	  *insert_node_pos = l;
	}
      else if (attach_detach)
	{
	  tree extra_node;
	  tree alloc_node
	    = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
					   &extra_node);
	  OMP_CLAUSE_CHAIN (l) = alloc_node;

	  tree *tail;
	  if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	      tail = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else
	    {
	      OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
	      tail = &OMP_CLAUSE_CHAIN (alloc_node);
	    }

	  /* For OpenMP semantics, we don't want to implicitly allocate
	     space for the pointer here for non-compute regions (e.g. "enter
	     data").  A FRAGILE_P node is only being created so that
	     omp-low.cc is able to rewrite the struct properly.
	     For references (to pointers), we want to actually allocate the
	     space for the reference itself in the sorted list following the
	     struct node.
	     For pointers, we want to allocate space if we had an explicit
	     mapping of the attachment point, but not otherwise.  */
	  if (*fragile_p
	      || (openmp
		  && !target
		  && attach_detach
		  && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
		  && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
	    {
	      if (!lang_GNU_Fortran ())
		/* In Fortran, pointers are dereferenced automatically, but may
		   be unassociated.  So we still want to allocate space for the
		   pointer (as the base for an attach operation that should be
		   present in the same directive's clause list also).  */
		OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
	      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
	    }

	  *insert_node_pos = l;

	  if (reprocessing_struct)
	    {
	      /* When reprocessing a struct node group used as the base of a
		 subcomponent access, if we have a reference-to-pointer base,
		 we will see:
		   struct(**ptr) attach(*ptr)
		 whereas for a non-reprocess-struct group, we see, e.g.:
		   tofrom(**ptr) attach(*ptr) attach(ptr)
		 and we create the "alloc" for the second "attach", i.e.
		 for the reference itself.  When reprocessing a struct group we
		 thus change the pointer attachment into a reference attachment
		 by stripping the indirection.  (The attachment of the
		 referenced pointer must happen elsewhere, either on the same
		 directive, or otherwise.)  */
	      tree adecl = OMP_CLAUSE_DECL (alloc_node);

	      if ((TREE_CODE (adecl) == INDIRECT_REF
		   || (TREE_CODE (adecl) == MEM_REF
		       && integer_zerop (TREE_OPERAND (adecl, 1))))
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (adecl, 0)))
		      == REFERENCE_TYPE)
		  && (TREE_CODE (TREE_TYPE (TREE_TYPE
			(TREE_OPERAND (adecl, 0)))) == POINTER_TYPE))
		OMP_CLAUSE_DECL (alloc_node) = TREE_OPERAND (adecl, 0);

	      *added_tail = tail;
	    }
	}
      else
	{
	  gcc_assert (*grp_start_p == grp_end);
	  if (reprocessing_struct)
	    {
	      /* If we don't have an attach/detach node, this is a
		 "target data" directive or similar, not an offload region.
		 Synthesize an "alloc" node using just the initiating
		 GOMP_MAP_STRUCT decl.  */
	      gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
				 || code == OACC_EXIT_DATA)
				? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
	      tree alloc_node
		= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				    OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
	      OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
	      OMP_CLAUSE_SIZE (alloc_node)
		= TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));

	      OMP_CLAUSE_CHAIN (alloc_node) = OMP_CLAUSE_CHAIN (l);
	      OMP_CLAUSE_CHAIN (l) = alloc_node;
	      *insert_node_pos = l;
	      *added_tail = &OMP_CLAUSE_CHAIN (alloc_node);
	    }
	  else
	    grp_start_p = omp_siblist_insert_node_after (l, insert_node_pos);
	}

      unsigned last_access = base_token + 1;

      while (last_access + 1 < addr_tokens.length ()
	     && addr_tokens[last_access + 1]->type == ACCESS_METHOD)
	last_access++;

      if ((region_type & ORT_TARGET)
	  && addr_tokens[base_token + 1]->type == ACCESS_METHOD)
	{
	  bool base_ref = false;
	  access_method_kinds access_kind
	    = addr_tokens[last_access]->u.access_kind;

	  switch (access_kind)
	    {
	    case ACCESS_DIRECT:
	    case ACCESS_INDEXED_ARRAY:
	      return NULL;

	    case ACCESS_REF:
	    case ACCESS_REF_TO_POINTER:
	    case ACCESS_REF_TO_POINTER_OFFSET:
	    case ACCESS_INDEXED_REF_TO_ARRAY:
	      base_ref = true;
	      break;

	    default:
	      break;
	    }
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				      OMP_CLAUSE_MAP);
	  enum gomp_map_kind mkind;
	  omp_mapping_group *decl_group;
	  tree use_base;
	  switch (access_kind)
	    {
	    case ACCESS_POINTER:
	    case ACCESS_POINTER_OFFSET:
	      use_base = addr_tokens[last_access]->expr;
	      break;
	    case ACCESS_REF_TO_POINTER:
	    case ACCESS_REF_TO_POINTER_OFFSET:
	      use_base
		= build_fold_indirect_ref (addr_tokens[last_access]->expr);
	      break;
	    default:
	      use_base = addr_tokens[base_token]->expr;
	    }
	  bool mapped_to_p
	    = omp_directive_maps_explicitly (group_map, use_base, &decl_group,
					     true, false, true);
	  if (addr_tokens[base_token]->type == STRUCTURE_BASE
	      && DECL_P (addr_tokens[last_access]->expr)
	      && !mapped_to_p)
	    mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
			     : GOMP_MAP_FIRSTPRIVATE_POINTER;
	  else
	    mkind = GOMP_MAP_ATTACH_DETACH;

	  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
	  /* If we have a reference to pointer base, we want to attach the
	     pointer here, not the reference.  The reference attachment happens
	     elsewhere.  */
	  bool ref_to_ptr
	    = (access_kind == ACCESS_REF_TO_POINTER
	       || access_kind == ACCESS_REF_TO_POINTER_OFFSET);
	  tree sdecl = addr_tokens[last_access]->expr;
	  tree sdecl_ptr = ref_to_ptr ? build_fold_indirect_ref (sdecl)
				      : sdecl;
	  /* For the FIRSTPRIVATE_REFERENCE after the struct node, we
	     want to use the reference itself for the decl, but we
	     still want to use the pointer to calculate the bias.  */
	  OMP_CLAUSE_DECL (c2) = (mkind == GOMP_MAP_ATTACH_DETACH)
				 ? sdecl_ptr : sdecl;
	  sdecl = sdecl_ptr;
	  tree baddr = build_fold_addr_expr (base);
	  baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
				    ptrdiff_type_node, baddr);
	  tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
					    ptrdiff_type_node, sdecl);
	  OMP_CLAUSE_SIZE (c2)
	    = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
			       ptrdiff_type_node, baddr, decladdr);
	  /* Insert after struct node.  */
	  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
	  OMP_CLAUSE_CHAIN (l) = c2;

	  if (addr_tokens[base_token]->type == STRUCTURE_BASE
	      && (addr_tokens[base_token]->u.structure_base_kind
		  == BASE_COMPONENT_EXPR)
	      && mkind == GOMP_MAP_ATTACH_DETACH
	      && addr_tokens[last_access]->u.access_kind != ACCESS_REF)
	    {
	      *inner = insert_node_pos;
	      return NULL;
	    }
	}

      if (addr_tokens[base_token]->type == STRUCTURE_BASE
	  && (addr_tokens[base_token]->u.structure_base_kind
	      == BASE_COMPONENT_EXPR)
	  && addr_tokens[last_access]->u.access_kind == ACCESS_REF)
	*inner = insert_node_pos;

      return NULL;
    }
  else if (struct_map_to_clause)
    {
      tree *osc = struct_map_to_clause->get (base);
      tree *sc = NULL, *scp = NULL;
      bool unordered = false;

      if (osc && OMP_CLAUSE_MAP_KIND (*osc) == GOMP_MAP_STRUCT_UNORD)
	unordered = true;

      unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
      sc = &OMP_CLAUSE_CHAIN (*osc);
      /* The struct mapping might be immediately followed by a
	 FIRSTPRIVATE_POINTER, FIRSTPRIVATE_REFERENCE or an ATTACH_DETACH --
	 if it's an indirect access or a reference, or if the structure base
	 is not a decl.  The FIRSTPRIVATE_* nodes are removed in omp-low.cc
	 after they have been processed there, and ATTACH_DETACH nodes are
	 recomputed and moved out of the GOMP_MAP_STRUCT construct once
	 sibling list building is complete.  */
      if (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	  || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_ATTACH_DETACH)
	sc = &OMP_CLAUSE_CHAIN (*sc);
      for (i = 0; i < elems; i++, sc = &OMP_CLAUSE_CHAIN (*sc))
	if (attach_detach && sc == grp_start_p)
	  break;
	else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
	  break;
	else
	  {
	    tree sc_decl = OMP_CLAUSE_DECL (*sc);
	    poly_offset_int offset;
	    poly_int64 bitpos;

	    if (TREE_CODE (sc_decl) == ARRAY_REF)
	      {
		while (TREE_CODE (sc_decl) == ARRAY_REF)
		  sc_decl = TREE_OPERAND (sc_decl, 0);
		if (TREE_CODE (sc_decl) != COMPONENT_REF
		    || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
		  break;
	      }
	    else if (INDIRECT_REF_P (sc_decl)
		     && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
		     && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
			 == REFERENCE_TYPE))
	      sc_decl = TREE_OPERAND (sc_decl, 0);

	    bool variable_offset2;
	    tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
						  &variable_offset2);
	    if (!base2 || !operand_equal_p (base2, base, 0))
	      break;
	    if (scp)
	      continue;
	    if (variable_offset2)
	      {
		OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
		unordered = true;
		break;
	      }
	    else if ((region_type & ORT_ACC) != 0)
	      {
		/* For OpenACC, allow (ignore) duplicate struct accesses in
		   the middle of a mapping clause, e.g. "mystruct->foo" in:
		   copy(mystruct->foo->bar) copy(mystruct->foo->qux).  */
		if (reprocessing_struct
		    && known_eq (coffset, offset)
		    && known_eq (cbitpos, bitpos))
		  return NULL;
	      }
	    else if (known_eq (coffset, offset)
		     && known_eq (cbitpos, bitpos))
	      {
		/* Having two struct members at the same offset doesn't work,
		   so make sure we don't.  (We're allowed to ignore this.
		   Should we report the error?)  */
		/*error_at (OMP_CLAUSE_LOCATION (grp_end),
			    "duplicate struct member %qE in map clauses",
			    OMP_CLAUSE_DECL (grp_end));*/
		return NULL;
	      }
	    if (maybe_lt (coffset, offset)
		|| (known_eq (coffset, offset)
		    && maybe_lt (cbitpos, bitpos)))
	      {
		if (attach_detach)
		  scp = sc;
		else
		  break;
	      }
	  }

      /* If this is an unordered struct, just insert the new element at the
	 end of the list.  */
      if (unordered)
	{
	  for (; i < elems; i++)
	    sc = &OMP_CLAUSE_CHAIN (*sc);
	  scp = NULL;
	}

      OMP_CLAUSE_SIZE (*osc)
	= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);

      if (reprocessing_struct)
	{
	  /* If we're reprocessing a struct node, we don't want to do most of
	     the list manipulation below.  We only need to handle the (pointer
	     or reference) attach/detach case.  */
	  tree extra_node, alloc_node;
	  if (has_descriptor)
	    gcc_unreachable ();
	  else if (attach_detach)
	    alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
						      grp_end, &extra_node);
	  else
	    {
	      /* If we don't have an attach/detach node, this is a
		 "target data" directive or similar, not an offload region.
		 Synthesize an "alloc" node using just the initiating
		 GOMP_MAP_STRUCT decl.  */
	      gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
				 || code == OACC_EXIT_DATA)
				? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
	      alloc_node
		= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				    OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
	      OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
	      OMP_CLAUSE_SIZE (alloc_node)
		= TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
	    }

	  if (scp)
	    omp_siblist_insert_node_after (alloc_node, scp);
	  else
	    {
	      tree *new_end = omp_siblist_insert_node_after (alloc_node, sc);
	      if (sc == *added_tail)
		*added_tail = new_end;
	    }

	  return NULL;
	}

      if (has_descriptor)
	{
	  tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
	  if (code == OMP_TARGET_EXIT_DATA
	      || code == OACC_EXIT_DATA)
	    OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
	  omp_siblist_move_node_after (desc,
				       &OMP_CLAUSE_CHAIN (*grp_start_p),
				       scp ? scp : sc);
	}
      else if (attach_detach)
	{
	  tree cl = NULL_TREE, extra_node;
	  tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
							 grp_end, &extra_node);
	  tree *tail_chain = NULL;

	  if (*fragile_p
	      || (openmp
		  && !target
		  && attach_detach
		  && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
		  && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
	    {
	      if (!lang_GNU_Fortran ())
		OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
	      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
	    }

	  /* Here, we have:

	     grp_end : the last (or only) node in this group.
	     grp_start_p : pointer to the first node in a pointer mapping group
			   up to and including GRP_END.
	     sc : pointer to the chain for the end of the struct component
		  list.
	     scp : pointer to the chain for the sorted position at which we
		   should insert in the middle of the struct component list
		   (else NULL to insert at end).
	     alloc_node : the "alloc" node for the structure (pointer-type)
			  component.  We insert at SCP (if present), else SC
			  (the end of the struct component list).
	     extra_node : a newly-synthesized node for an additional indirect
			  pointer mapping or a Fortran pointer set, if needed.
	     cl : first node to prepend before grp_start_p.
	     tail_chain : pointer to chain of last prepended node.

	     The general idea is we move the nodes for this struct mapping
	     together: the alloc node goes into the sorted list directly after
	     the struct mapping, and any extra nodes (together with the nodes
	     mapping arrays pointed to by struct components) get moved after
	     that list.  When SCP is NULL, we insert the nodes at SC, i.e. at
	     the end of the struct component mapping list.  It's important that
	     the alloc_node comes first in that case because it's part of the
	     sorted component mapping list (but subsequent nodes are not!).  */

	  if (scp)
	    omp_siblist_insert_node_after (alloc_node, scp);

	  /* Make [cl,tail_chain] a list of the alloc node (if we haven't
	     already inserted it) and the extra_node (if it is present).  The
	     list can be empty if we added alloc_node above and there is no
	     extra node.  */
	  if (scp && extra_node)
	    {
	      cl = extra_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (!scp)
	    {
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
	    }

	  continue_at
	    = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
							grp_start_p, grp_end,
							sc)
		 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
	}
      else if (*sc != grp_end)
	{
	  gcc_assert (*grp_start_p == grp_end);

	  /* We are moving the current node back to a previous struct node:
	     the node that used to point to the current node will now point to
	     the next node.  */
	  continue_at = grp_start_p;
	  /* In the non-pointer case, the mapping clause itself is moved into
	     the correct position in the struct component list, which in this
	     case is just SC.  */
	  omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
	}
    }
  return continue_at;
}

/* Scan through GROUPS, and create sorted structure sibling lists without
   gimplifying.  */

static bool
omp_build_struct_sibling_lists (enum tree_code code,
				enum omp_region_type region_type,
				vec<omp_mapping_group> *groups,
				hash_map<tree_operand_hash_no_se,
					 omp_mapping_group *> **grpmap,
				tree *list_p)
{
  using namespace omp_addr_tokenizer;
  unsigned i;
  omp_mapping_group *grp;
  hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
  bool success = true;
  tree *new_next = NULL;
  tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
  tree added_nodes = NULL_TREE;
  tree *added_tail = &added_nodes;
  auto_vec<omp_mapping_group> pre_hwm_groups;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree c = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (c);
      tree grp_end = grp->grp_end;
      auto_vec<omp_addr_token *> addr_tokens;
      tree sentinel = OMP_CLAUSE_CHAIN (grp_end);

      if (new_next && !grp->reprocess_struct)
	grp->grp_start = new_next;

      new_next = NULL;

      tree *grp_start_p = grp->grp_start;

      /* Skip groups we marked for deletion in
	 {omp,oacc}_resolve_clause_dependencies.  */
      if (grp->deleted)
	continue;

      if (OMP_CLAUSE_CHAIN (*grp_start_p)
	  && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
	{
	  /* Don't process an array descriptor that isn't inside a derived type
	     as a struct (the GOMP_MAP_POINTER following will have the form
	     "var.data", but such mappings are handled specially).  */
	  tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
	  if (omp_map_clause_descriptor_p (grpmid)
	      && DECL_P (OMP_CLAUSE_DECL (grpmid)))
	    continue;
	}

      tree expr = decl;

      while (TREE_CODE (expr) == ARRAY_REF)
	expr = TREE_OPERAND (expr, 0);

      if (!omp_parse_expr (addr_tokens, expr))
	continue;

      omp_addr_token *last_token
	= omp_first_chained_access_token (addr_tokens);

      /* A mapping of a reference to a pointer member that doesn't specify an
	 array section, etc., like this:
	   *mystruct.ref_to_ptr
	 should not be processed by the struct sibling-list handling code --
	 it just transfers the referenced pointer.

	 In contrast, the quite similar-looking construct:
	   *mystruct.ptr_to_ptr
	 which is equivalent to e.g.
	   mystruct.ptr_to_ptr[0]
	 *does* trigger sibling-list processing.

	 An exception for the former case is for "fragile" groups where the
	 reference itself is not handled otherwise; this is subject to special
	 handling in omp_accumulate_sibling_list also.  */
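
      /* A minimal sketch of the distinction (hypothetical types, not from
	 the original source):

	   struct T { int *&ref_to_ptr; int **ptr_to_ptr; } *t;

	 map(*t->ref_to_ptr) only transfers the pointer the reference refers
	 to, so no sibling list is built for it, whereas map(*t->ptr_to_ptr),
	 being equivalent to t->ptr_to_ptr[0], does go through the
	 sibling-list machinery below.  */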
      if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
	  && last_token->type == ACCESS_METHOD
	  && last_token->u.access_kind == ACCESS_REF
	  && !grp->fragile)
	continue;

      tree d = decl;
      if (TREE_CODE (d) == ARRAY_REF)
	{
	  while (TREE_CODE (d) == ARRAY_REF)
	    d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
	    decl = d;
	}
      if (d == decl
	  && INDIRECT_REF_P (decl)
	  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
	  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
	      == REFERENCE_TYPE)
	  && (OMP_CLAUSE_MAP_KIND (c)
	      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
	decl = TREE_OPERAND (decl, 0);

      STRIP_NOPS (decl);

      if (TREE_CODE (decl) != COMPONENT_REF)
	continue;

      /* If we're mapping the whole struct in another node, skip adding this
	 node to a sibling list.  */
      omp_mapping_group *wholestruct;
      if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
					   &wholestruct))
	continue;

      if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
	  && code != OACC_UPDATE
	  && code != OMP_TARGET_UPDATE)
	{
	  if (error_operand_p (decl))
	    {
	      success = false;
	      goto error_out;
	    }

	  tree stype = TREE_TYPE (decl);
	  if (TREE_CODE (stype) == REFERENCE_TYPE)
	    stype = TREE_TYPE (stype);
	  if (TYPE_SIZE_UNIT (stype) == NULL
	      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"mapping field %qE of variable length "
			"structure", OMP_CLAUSE_DECL (c));
	      success = false;
	      goto error_out;
	    }

	  tree *inner = NULL;
	  bool fragile_p = grp->fragile;

	  new_next
	    = omp_accumulate_sibling_list (region_type, code,
					   struct_map_to_clause, *grpmap,
					   grp_start_p, grp_end, addr_tokens,
					   &inner, &fragile_p,
					   grp->reprocess_struct, &added_tail);

	  if (inner)
	    {
	      omp_mapping_group newgrp;
	      newgrp.grp_start = inner;
	      if (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (*inner))
		  == GOMP_MAP_ATTACH_DETACH)
		newgrp.grp_end = OMP_CLAUSE_CHAIN (*inner);
	      else
		newgrp.grp_end = *inner;
	      newgrp.mark = UNVISITED;
	      newgrp.sibling = NULL;
	      newgrp.deleted = false;
	      newgrp.reprocess_struct = true;
	      newgrp.fragile = fragile_p;
	      newgrp.next = NULL;
	      groups->safe_push (newgrp);

	      /* !!! Growing GROUPS might invalidate the pointers in the group
		 map.  Rebuild it here.  This is a bit inefficient, but
		 shouldn't happen very often.  */
	      delete (*grpmap);
	      *grpmap
		= omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
					      sentinel);
	    }
	}
    }

  /* Delete groups marked for deletion above.  At this point the order of the
     groups may no longer correspond to the order of the underlying list,
     which complicates this a little.  First clear out OMP_CLAUSE_DECL for
     deleted nodes...  */

  FOR_EACH_VEC_ELT (*groups, i, grp)
    if (grp->deleted)
      for (tree d = *grp->grp_start;
	   d != OMP_CLAUSE_CHAIN (grp->grp_end);
	   d = OMP_CLAUSE_CHAIN (d))
	OMP_CLAUSE_DECL (d) = NULL_TREE;

  /* ...then sweep through the list removing the now-empty nodes.  */

  tail = list_p;
  while (*tail)
    {
      if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
	  && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
	*tail = OMP_CLAUSE_CHAIN (*tail);
      else
	tail = &OMP_CLAUSE_CHAIN (*tail);
    }

  /* Tack on the struct nodes added during nested struct reprocessing.  */
  if (added_nodes)
    {
      *tail = added_nodes;
      tail = added_tail;
    }

  /* Now we have finished building the struct sibling lists, reprocess
     newly-added "attach" nodes: we need the address of the first
     mapped element of each struct sibling list for the bias of the attach
     operation -- not necessarily the base address of the whole struct.  */
  if (struct_map_to_clause)
    for (hash_map<tree_operand_hash, tree>::iterator iter
	   = struct_map_to_clause->begin ();
	 iter != struct_map_to_clause->end ();
	 ++iter)
      {
	tree struct_node = (*iter).second;
	gcc_assert (OMP_CLAUSE_CODE (struct_node) == OMP_CLAUSE_MAP);
	tree attach = OMP_CLAUSE_CHAIN (struct_node);

	if (OMP_CLAUSE_CODE (attach) != OMP_CLAUSE_MAP
	    || OMP_CLAUSE_MAP_KIND (attach) != GOMP_MAP_ATTACH_DETACH)
	  continue;

	OMP_CLAUSE_SET_MAP_KIND (attach, GOMP_MAP_ATTACH);

	/* Sanity check: the standalone attach node will not work if we have
	   an "enter data" operation (because for those, variables need to be
	   mapped separately and attach nodes must be grouped together with the
	   base they attach to).  We should only have created the
	   ATTACH_DETACH node after GOMP_MAP_STRUCT for a target region, so
	   this should never be true.  */
	gcc_assert ((region_type & ORT_TARGET) != 0);

	/* This is the first sorted node in the struct sibling list.  Use it
	   to recalculate the correct bias to use.
	   (&first_node - attach_decl).
	   For GOMP_MAP_STRUCT_UNORD, we need e.g. the
	   min(min(min(first,second),third),fourth) element, because the
	   elements aren't in any particular order.  */
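	/* Illustrative sketch (hypothetical names, not from the original
	   source): for

	     struct S { int *p; } s;
	     #pragma omp target map(s.p[2:8])

	   the attach bias becomes &s.p[2] - (ptrdiff_t) s.p, i.e. the offset
	   of the first mapped element from the pointer's target.  With
	   GOMP_MAP_STRUCT_UNORD the first operand of the MINUS_EXPR is the
	   MIN_EXPR chain over all mapped elements' addresses instead, since
	   the siblings are not sorted.  */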
	tree lowest_addr;
	if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
	  {
	    tree first_node = OMP_CLAUSE_CHAIN (attach);
	    unsigned HOST_WIDE_INT num_mappings
	      = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
	    lowest_addr = OMP_CLAUSE_DECL (first_node);
	    lowest_addr = build_fold_addr_expr (lowest_addr);
	    lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
	    tree next_node = OMP_CLAUSE_CHAIN (first_node);
	    while (num_mappings > 1)
	      {
		tree tmp = OMP_CLAUSE_DECL (next_node);
		tmp = build_fold_addr_expr (tmp);
		tmp = fold_convert (pointer_sized_int_node, tmp);
		lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
					   lowest_addr, tmp);
		next_node = OMP_CLAUSE_CHAIN (next_node);
		num_mappings--;
	      }
	    lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
	  }
	else
	  {
	    tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
	    first_node = build_fold_addr_expr (first_node);
	    lowest_addr = fold_convert (ptrdiff_type_node, first_node);
	  }
	tree attach_decl = OMP_CLAUSE_DECL (attach);
	attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
	OMP_CLAUSE_SIZE (attach)
	  = fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
			 attach_decl);

	/* Remove GOMP_MAP_ATTACH node from after struct node.  */
	OMP_CLAUSE_CHAIN (struct_node) = OMP_CLAUSE_CHAIN (attach);
	/* ...and re-insert it at the end of our clause list.  */
	*tail = attach;
	OMP_CLAUSE_CHAIN (attach) = NULL_TREE;
	tail = &OMP_CLAUSE_CHAIN (attach);
      }

 error_out:
  if (struct_map_to_clause)
    delete struct_map_to_clause;

  return success;
}
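
/* As a hedged example of the overall effect of the machinery above (node
   kinds and ordering shown schematically; the exact dump differs by target
   and language): for

     struct S { int x; int *p; } s;
     #pragma omp target map(to: s.x) map(to: s.p[0:4])

   the clause list ends up roughly as

     map(struct: s [len: 2]) map(to: s.x) map(alloc: s.p)
     map(to: s.p[0:4]) map(attach: s.p [bias: 0])

   i.e. a GOMP_MAP_STRUCT node followed by the sorted component siblings,
   with pointer/attach nodes re-inserted after the sorted region.  */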

/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type,
			   enum tree_code code)
{
  using namespace omp_addr_tokenizer;
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;
  tree *orig_list_p = list_p;
  int handled_depend_iterators = -1;
  int nowait = -1;

  ctx = new_omp_context (region_type);
  ctx->code = code;
  outer_ctx = ctx->outer_context;
  if (code == OMP_TARGET)
    {
      if (!lang_GNU_Fortran ())
	ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
      ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
      ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
					     ? GOVD_MAP : GOVD_FIRSTPRIVATE);
    }
  if (!lang_GNU_Fortran ())
    switch (code)
      {
      case OMP_TARGET:
      case OMP_TARGET_DATA:
      case OMP_TARGET_ENTER_DATA:
      case OMP_TARGET_EXIT_DATA:
      case OACC_DECLARE:
      case OACC_HOST_DATA:
      case OACC_PARALLEL:
      case OACC_KERNELS:
	ctx->target_firstprivatize_array_bases = true;
      default:
	break;
      }

  vec<omp_mapping_group> *groups = NULL;
  hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
  unsigned grpnum = 0;
  tree *grp_start_p = NULL, grp_end = NULL_TREE;

  if (code == OMP_TARGET
      || code == OMP_TARGET_DATA
      || code == OMP_TARGET_ENTER_DATA
      || code == OMP_TARGET_EXIT_DATA
      || code == OACC_DATA
      || code == OACC_KERNELS
      || code == OACC_PARALLEL
      || code == OACC_SERIAL
      || code == OACC_ENTER_DATA
      || code == OACC_EXIT_DATA
      || code == OACC_UPDATE
      || code == OACC_DECLARE)
    {
      groups = omp_gather_mapping_groups (list_p);
      if (groups)
	grpmap = omp_index_mapping_groups (groups);
    }
  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      bool map_descriptor;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;
      auto_vec<omp_addr_token *, 10> addr_tokens;

      if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
	{
	  grp_start_p = NULL;
	  grp_end = NULL_TREE;
	}

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      gcc_assert (code == OMP_TARGET);
	      flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
	    }
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    switch (code)
	      {
	      case OMP_DISTRIBUTE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "distribute");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      default:
		break;
	      }
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  if (code != OMP_LOOP)
	    check_non_private = "lastprivate";
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    goto do_add;
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	      && !lang_hooks.decls.omp_scalar_p (decl, true))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"non-scalar variable %qD in conditional "
			"%<lastprivate%> clause", decl);
	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
	    }
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    flags |= GOVD_LASTPRIVATE_CONDITIONAL;
	  omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
							 false);
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
		{
		  if (nowait == -1)
		    nowait = omp_find_clause (*list_p,
					      OMP_CLAUSE_NOWAIT) != NULL_TREE;
		  if (nowait
		      && (outer_ctx == NULL
			  || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<task%> reduction modifier on a construct "
				"with a %<nowait%> clause");
		      OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		    }
		}
	      else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "invalid %<task%> reduction modifier on construct "
			    "other than %<parallel%>, %qs, %<sections%> or "
			    "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
		  OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		}
	    }
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    switch (code)
	      {
	      case OMP_SECTIONS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "sections");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_PARALLEL:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "parallel");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TEAMS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "teams");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_SCOPE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "scope");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      default:
		break;
	      }
	  /* FALLTHRU */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  /* OpenACC permits reductions on private variables.  */
	  if (!(region_type & ORT_ACC)
	      /* taskgroup is actually not a worksharing region.  */
	      && code != OMP_TASKGROUP)
	    check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (decl);
	      bool saved_into_ssa = gimplify_ctxp->into_ssa;
	      gimplify_ctxp->into_ssa = false;
	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
				 NULL, is_gimple_val, fb_rvalue, false)
		  == GS_ERROR)
		{
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  remove = true;
		  break;
		}
	      gimplify_ctxp->into_ssa = saved_into_ssa;
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      if (DECL_P (v))
		{
		  omp_firstprivatize_variable (ctx, v);
		  omp_notice_variable (ctx, v, true);
		}
	      decl = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
		{
		  gimplify_ctxp->into_ssa = false;
		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
				     NULL, is_gimple_val, fb_rvalue, false)
		      == GS_ERROR)
		    {
		      gimplify_ctxp->into_ssa = saved_into_ssa;
		      remove = true;
		      break;
		    }
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  v = TREE_OPERAND (decl, 1);
		  if (DECL_P (v))
		    {
		      omp_firstprivatize_variable (ctx, v);
		      omp_notice_variable (ctx, v, true);
		    }
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == ADDR_EXPR
		  || TREE_CODE (decl) == INDIRECT_REF)
		decl = TREE_OPERAND (decl, 0);
	    }
	  goto do_add_decl;
	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else
	    {
	      if (code == OMP_SIMD
		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		{
		  struct gimplify_omp_ctx *octx = outer_ctx;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && !octx->distribute)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context->outer_context;
		      else
			octx = octx->outer_context;
		    }
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && octx->distribute)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<linear%> clause for variable other than "
				"loop iterator specified on construct "
				"combined with %<distribute%>");
		      remove = true;
		      break;
		    }
		}
	      /* For combined #pragma omp parallel for simd, need to put
		 lastprivate and perhaps firstprivate too on the
		 parallel.  Similarly for #pragma omp for simd.  */
	      struct gimplify_omp_ctx *octx = outer_ctx;
	      bool taskloop_seen = false;
	      decl = NULL_TREE;
	      do
		{
		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    break;
		  decl = OMP_CLAUSE_DECL (c);
		  if (error_operand_p (decl))
		    {
		      decl = NULL_TREE;
		      break;
		    }
		  flags = GOVD_SEEN;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    flags |= GOVD_FIRSTPRIVATE;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    flags |= GOVD_LASTPRIVATE;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context;
		      else if (omp_check_private (octx, decl, false))
			break;
		    }
		  else if (octx
			   && (octx->region_type & ORT_TASK) != 0
			   && octx->combined_loop)
		    taskloop_seen = true;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_PARALLEL
			   && ((ctx->region_type == ORT_WORKSHARE
				&& octx == outer_ctx)
			       || taskloop_seen))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && ((octx->region_type & ORT_COMBINED_TEAMS)
			       == ORT_COMBINED_TEAMS))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TARGET)
		    {
		      if (flags & GOVD_LASTPRIVATE)
			flags = GOVD_SEEN | GOVD_MAP;
		    }
		  else
		    break;
		  splay_tree_node on
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
		    {
		      octx = NULL;
		      break;
		    }
		  omp_add_variable (octx, decl, flags);
		  if (octx->outer_context == NULL)
		    break;
		  octx = octx->outer_context;
		}
	      while (1);
	      if (octx
		  && decl
		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		omp_notice_variable (octx, decl, true);
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  goto do_add;
	case OMP_CLAUSE_MAP:
	  if (!grp_start_p)
	    {
	      grp_start_p = list_p;
	      grp_end = (*groups)[grpnum].grp_end;
	      grpnum++;
	    }
	  decl = OMP_CLAUSE_DECL (c);

	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }

	  if (!omp_parse_expr (addr_tokens, decl))
	    {
	      remove = true;
	      break;
	    }

	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
	    {
	      struct gimplify_omp_ctx *octx;
	      for (octx = outer_ctx; octx; octx = octx->outer_context)
		{
		  if (octx->region_type != ORT_ACC_HOST_DATA)
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
			      "declared in enclosing %<host_data%> region",
			      DECL_NAME (decl));
		}
	    }

	  map_descriptor = false;

	  /* This condition checks if we're mapping an array descriptor that
	     isn't inside a derived type -- these have special handling, and
	     are not handled as structs in omp_build_struct_sibling_lists.
	     See that function for further details.  */
	  if (*grp_start_p != grp_end
	      && OMP_CLAUSE_CHAIN (*grp_start_p)
	      && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
	    {
	      tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
	      if (omp_map_clause_descriptor_p (grp_mid)
		  && DECL_P (OMP_CLAUSE_DECL (grp_mid)))
		map_descriptor = true;
	    }
	  else if (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP
		   && (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_RELEASE
		       || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DELETE)
		   && OMP_CLAUSE_RELEASE_DESCRIPTOR (grp_end))
	    map_descriptor = true;

	  /* Adding the decl for a struct access: we haven't created
	     GOMP_MAP_STRUCT nodes yet, so this statement needs to predict
	     whether they will be created in gimplify_adjust_omp_clauses.
	     NOTE: Technically we should probably look through DECL_VALUE_EXPR
	     here because something that looks like a DECL_P may actually be a
	     struct access, e.g. variables in a lambda closure
	     (__closure->__foo) or class members (this->foo).  Currently in both
	     those cases we map the whole of the containing object (directly in
	     the C++ FE) though, so struct nodes are not created.  */
	  if (DECL_P (decl)
	      && addr_tokens[0]->type == STRUCTURE_BASE
	      && addr_tokens[0]->u.structure_base_kind == BASE_DECL
	      && !map_descriptor)
	    {
	      gcc_assert (addr_tokens[1]->type == ACCESS_METHOD);

	      /* If we got to this struct via a chain of pointers, maybe we
		 want to map it implicitly instead.  */
	      if (omp_access_chain_p (addr_tokens, 1))
		break;
	      omp_mapping_group *wholestruct;
	      if (!(region_type & ORT_ACC)
		  && omp_mapped_by_containing_struct (grpmap,
						      OMP_CLAUSE_DECL (c),
						      &wholestruct))
		break;
	      decl = addr_tokens[1]->expr;
	      if (splay_tree_lookup (ctx->variables, (splay_tree_key) decl))
		break;
	      /* Standalone attach or detach clauses for a struct element
		 should not inhibit implicit mapping of the whole struct.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
		break;
	      flags = GOVD_MAP | GOVD_EXPLICIT;

	      gcc_assert (addr_tokens[1]->u.access_kind != ACCESS_DIRECT
			  || TREE_ADDRESSABLE (decl));
	      goto do_add_decl;
	    }
	  if (!DECL_P (decl))
	    {
	      tree d = decl, *pd;
	      if (TREE_CODE (d) == ARRAY_REF)
		{
		  while (TREE_CODE (d) == ARRAY_REF)
		    d = TREE_OPERAND (d, 0);
		  if (TREE_CODE (d) == COMPONENT_REF
		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
		    decl = d;
		}
	      pd = &OMP_CLAUSE_DECL (c);
	      if (d == decl
		  && TREE_CODE (decl) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
		      == REFERENCE_TYPE)
		  && (OMP_CLAUSE_MAP_KIND (c)
		      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
		{
		  pd = &TREE_OPERAND (decl, 0);
		  decl = TREE_OPERAND (decl, 0);
		}

	      if (addr_tokens[0]->type == STRUCTURE_BASE
		  && addr_tokens[0]->u.structure_base_kind == BASE_DECL
		  && addr_tokens[1]->type == ACCESS_METHOD
		  && (addr_tokens[1]->u.access_kind == ACCESS_POINTER
		      || (addr_tokens[1]->u.access_kind
			  == ACCESS_POINTER_OFFSET))
		  && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)))
		{
		  tree base = addr_tokens[1]->expr;
		  splay_tree_node n
		    = splay_tree_lookup (ctx->variables,
					 (splay_tree_key) base);
		  n->value |= GOVD_SEEN;
		}
	      if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
		{
		  /* Don't gimplify *pd fully at this point, as the base
		     will need to be adjusted during omp lowering.  */
		  auto_vec<tree, 10> expr_stack;
		  tree *p = pd;
		  while (handled_component_p (*p)
			 || TREE_CODE (*p) == INDIRECT_REF
			 || TREE_CODE (*p) == ADDR_EXPR
			 || TREE_CODE (*p) == MEM_REF
			 || TREE_CODE (*p) == NON_LVALUE_EXPR)
		    {
		      expr_stack.safe_push (*p);
		      p = &TREE_OPERAND (*p, 0);
		    }
		  for (int i = expr_stack.length () - 1; i >= 0; i--)
		    {
		      tree t = expr_stack[i];
		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree low = unshare_expr (array_ref_low_bound (t));
			      if (!is_gimple_min_invariant (low))
				{
				  TREE_OPERAND (t, 2) = low;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			  if (TREE_OPERAND (t, 3) == NULL_TREE)
			    {
			      tree elmt_size = array_ref_element_size (t);
			      if (!is_gimple_min_invariant (elmt_size))
				{
				  elmt_size = unshare_expr (elmt_size);
				  tree elmt_type
				    = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
									  0)));
				  tree factor
				    = size_int (TYPE_ALIGN_UNIT (elmt_type));
				  elmt_size
				    = size_binop (EXACT_DIV_EXPR, elmt_size,
						  factor);
				  TREE_OPERAND (t, 3) = elmt_size;
				  if (gimplify_expr (&TREE_OPERAND (t, 3),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		      else if (TREE_CODE (t) == COMPONENT_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree offset = component_ref_field_offset (t);
			      if (!is_gimple_min_invariant (offset))
				{
				  offset = unshare_expr (offset);
				  tree field = TREE_OPERAND (t, 1);
				  tree factor
				    = size_int (DECL_OFFSET_ALIGN (field)
						/ BITS_PER_UNIT);
				  offset = size_binop (EXACT_DIV_EXPR, offset,
						       factor);
				  TREE_OPERAND (t, 2) = offset;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		  for (; expr_stack.length () > 0; )
		    {
		      tree t = expr_stack.pop ();

		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
			      && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
						NULL, is_gimple_val,
						fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		}
	      else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
				      fb_lvalue) == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  if ((code == OMP_TARGET
	       || code == OMP_TARGET_DATA
	       || code == OMP_TARGET_ENTER_DATA
	       || code == OMP_TARGET_EXIT_DATA)
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
	    {
	      /* If we have attach/detach but the decl we have is a pointer to
		 pointer, we're probably mapping the "base level" array
		 implicitly.  Make sure we don't add the decl as if we mapped
		 it explicitly.  That is,

		   int **arr;
		   [...]
		   #pragma omp target map(arr[a][b:c])

		 should *not* map "arr" explicitly.  That way we get a
		 zero-length "alloc" mapping for it, and assuming it's been
		 mapped by some previous directive, etc., things work as they
		 should.  */

	      tree basetype = TREE_TYPE (addr_tokens[0]->expr);

	      if (TREE_CODE (basetype) == REFERENCE_TYPE)
		basetype = TREE_TYPE (basetype);

	      if (code == OMP_TARGET
		  && addr_tokens[0]->type == ARRAY_BASE
		  && addr_tokens[0]->u.structure_base_kind == BASE_DECL
		  && TREE_CODE (basetype) == POINTER_TYPE
		  && TREE_CODE (TREE_TYPE (basetype)) == POINTER_TYPE)
		break;
	    }

	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TO
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TOFROM)
	    flags |= GOVD_MAP_ALWAYS_TO;
	  goto do_add;
	case OMP_CLAUSE_AFFINITY:
	  gimplify_omp_affinity (list_p, pre_p);
	  remove = true;
	  break;

	case OMP_CLAUSE_DOACROSS:
	  if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	    {
	      tree deps = OMP_CLAUSE_DECL (c);
	      while (deps && TREE_CODE (deps) == TREE_LIST)
		{
		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
				   pre_p, NULL, is_gimple_val, fb_rvalue);
		  deps = TREE_CHAIN (deps);
		}
	    }
	  else
	    gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
			== OMP_CLAUSE_DOACROSS_SOURCE);
	  break;

	case OMP_CLAUSE_DEPEND:
	  if (handled_depend_iterators == -1)
	    handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
	  if (handled_depend_iterators)
	    {
	      if (handled_depend_iterators == 2)
		remove = true;
	      break;
	    }
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_DECL (c) != null_pointer_node)
	    {
	      OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
				 is_gimple_val, fb_rvalue) == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	    }
	  if (code == OMP_TASK)
	    ctx->has_depend = true;
	  break;
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		remove = true;
	      break;
	    }
	  goto do_notice;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  flags = GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  flags = GOVD_EXPLICIT;
	  goto do_add_decl;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;
	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	do_add_decl:
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
	    {
	      tree t = omp_member_access_dummy_var (decl);
	      if (t)
		{
		  tree v = DECL_VALUE_EXPR (decl);
		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
		  if (outer_ctx)
		    omp_notice_variable (outer_ctx, t, true);
		}
	    }
	  if (code == OACC_DATA
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	    flags |= GOVD_MAP_0LEN_ARRAY;
	  omp_add_variable (ctx, decl, flags);
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      struct gimplify_omp_ctx *pctx
		= code == OMP_TARGET ? outer_ctx : ctx;
	      if (pctx)
		omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      if (pctx
		  && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_decl_expr,
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				NULL) == NULL_TREE)
		omp_add_variable (pctx,
				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = pctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
	      && !remove
	      && !omp_check_private (ctx, decl, true))
	    {
	      remove = true;
	      if (is_global_var (decl))
		{
		  if (DECL_THREAD_LOCAL_P (decl))
		    remove = false;
		  else if (DECL_HAS_VALUE_EXPR_P (decl))
		    {
		      tree value = get_base_address (DECL_VALUE_EXPR (decl));

		      if (value
			  && DECL_P (value)
			  && DECL_THREAD_LOCAL_P (value))
			remove = false;
		    }
		}
	      if (remove)
		error_at (OMP_CLAUSE_LOCATION (c),
			  "copyprivate variable %qE is not threadprivate"
			  " or private in outer context", DECL_NAME (decl));
	    }
	do_notice:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
	      && outer_ctx
	      && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
		  || (region_type == ORT_WORKSHARE
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && (OMP_CLAUSE_REDUCTION_INSCAN (c)
			  || code == OMP_LOOP)))
	      && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
		  || (code == OMP_LOOP
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
			  == ORT_COMBINED_TEAMS))))
	    {
	      splay_tree_node on
		= splay_tree_lookup (outer_ctx->variables,
				     (splay_tree_key)decl);
	      if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			  || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			      && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE))))
		    omp_firstprivatize_variable (outer_ctx, decl);
		  else
		    {
		      omp_add_variable (outer_ctx, decl,
					GOVD_SEEN | GOVD_SHARED);
		      if (outer_ctx->outer_context)
			omp_notice_variable (outer_ctx->outer_context, decl,
					     true);
		    }
		}
	    }
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  || decl == OMP_CLAUSE_DECL (c)
		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			  == ADDR_EXPR
			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			      == POINTER_PLUS_EXPR
			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
					       (OMP_CLAUSE_DECL (c), 0), 0))
				  == ADDR_EXPR)))))
	      && omp_check_private (ctx, decl, false))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;
	case OMP_CLAUSE_DETACH:
	  flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
	  goto do_add;

	case OMP_CLAUSE_IF:
	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
	    {
	      const char *p[2];
	      for (int i = 0; i < 2; i++)
		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
		  {
		  case VOID_CST: p[i] = "cancel"; break;
		  case OMP_PARALLEL: p[i] = "parallel"; break;
		  case OMP_SIMD: p[i] = "simd"; break;
		  case OMP_TASK: p[i] = "task"; break;
		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
		  case OMP_TARGET_DATA: p[i] = "target data"; break;
		  case OMP_TARGET: p[i] = "target"; break;
		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
		  case OMP_TARGET_ENTER_DATA:
		    p[i] = "target enter data"; break;
		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
		  default: gcc_unreachable ();
		  }
	      error_at (OMP_CLAUSE_LOCATION (c),
			"expected %qs %<if%> clause modifier rather than %qs",
			p[0], p[1]);
	      remove = true;
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_SELF:
	case OMP_CLAUSE_FINAL:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */
	case OMP_CLAUSE_NUM_TEAMS:
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
	      && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
	      && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
	    {
	      if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
		{
		  remove = true;
		  break;
		}
	      OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
		= get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
					   pre_p, NULL, true);
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
	      && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      if (code != OMP_TARGET)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<device%> clause with %<ancestor%> is only "
			    "allowed on %<target%> construct");
		  remove = true;
		  break;
		}

	      tree clauses = *orig_list_p;
	      for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
		if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP)
		  {
		    error_at (OMP_CLAUSE_LOCATION (c),
			      "with %<ancestor%>, only the %<device%>, "
			      "%<firstprivate%>, %<private%>, %<defaultmap%>, "
			      "and %<map%> clauses may appear on the "
			      "construct");
		    remove = true;
		    break;
		  }
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	  if (OMP_CLAUSE_OPERAND (c, 0)
	      && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
	    {
	      if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
		{
		  remove = true;
		  break;
		}
	      /* All these clauses care about value, not a particular decl,
		 so try to force it into a SSA_NAME or fresh temporary.  */
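	      /* For instance (an illustrative sketch with hypothetical
		 variable names):

		   #pragma omp parallel num_threads (x + y)

		 becomes, roughly,

		   D.1234 = x + y;
		   #pragma omp parallel num_threads (D.1234)

		 so later phases see a value, not an arbitrary expression.  */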
	      OMP_CLAUSE_OPERAND (c, 0)
		= get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
					   pre_p, NULL, true);
	    }
	  break;

	case OMP_CLAUSE_GANG:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	  nowait = 1;
	  break;

	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;
	case OMP_CLAUSE_DEFAULTMAP:
	  enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
	  switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
	    {
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
	      gdmkmin = GDMK_SCALAR;
	      gdmkmax = GDMK_POINTER;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
	      gdmkmin = GDMK_SCALAR;
	      gdmkmax = GDMK_SCALAR_TARGET;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
	      gdmkmin = gdmkmax = GDMK_AGGREGATE;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
	      gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
	      gdmkmin = gdmkmax = GDMK_POINTER;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
	    switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
	      {
	      case OMP_CLAUSE_DEFAULTMAP_ALLOC:
		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_TO:
		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_FROM:
		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_TOFROM:
		ctx->defaultmap[gdmk] = GOVD_MAP;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
		ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_NONE:
		ctx->defaultmap[gdmk] = 0;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_PRESENT:
		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
		switch (gdmk)
		  {
		  case GDMK_SCALAR:
		    ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
		    break;
		  case GDMK_SCALAR_TARGET:
		    ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
					     ? GOVD_MAP : GOVD_FIRSTPRIVATE);
		    break;
		  case GDMK_AGGREGATE:
		  case GDMK_ALLOCATABLE:
		    ctx->defaultmap[gdmk] = GOVD_MAP;
		    break;
		  case GDMK_POINTER:
		    ctx->defaultmap[gdmk] = GOVD_MAP;
		    if (!lang_GNU_Fortran ())
		      ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
		    break;
		  default:
		    gcc_unreachable ();
		  }
		break;
	      default:
		gcc_unreachable ();
	      }
	  break;
	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
	  break;

	case OMP_CLAUSE_NONTEMPORAL:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  decl = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	  if (decl
	      && TREE_CODE (decl) == INTEGER_CST
	      && wi::eq_p (wi::to_widest (decl), GOMP_OMP_PREDEF_ALLOC_THREADS)
	      && (code == OMP_TARGET || code == OMP_TASK || code == OMP_TASKLOOP))
	    warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
			"allocator with access trait set to %<thread%> "
			"results in undefined behavior for %qs directive",
			code == OMP_TARGET ? "target"
					   : (code == OMP_TASK
					      ? "task" : "taskloop"));
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
		   || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
		       == INTEGER_CST))
	    ;
	  else if (code == OMP_TASKLOOP
		   || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
	    OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
	      = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
					 pre_p, NULL, false);
	  break;
	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  decl = OMP_CLAUSE_DECL (c);
	  {
	    splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
						   (splay_tree_key) decl);
	    if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%qD specified in %qs clause but not in %<inscan%> "
			  "%<reduction%> clause on the containing construct",
			  decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		remove = true;
	      }
	    else
	      {
		n->value |= GOVD_REDUCTION_INSCAN;
		if (outer_ctx->region_type == ORT_SIMD
		    && outer_ctx->outer_context
		    && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
		  {
		    n = splay_tree_lookup (outer_ctx->outer_context->variables,
					   (splay_tree_key) decl);
		    if (n && (n->value & GOVD_REDUCTION) != 0)
		      n->value |= GOVD_REDUCTION_INSCAN;
		  }
	      }
	  }
	  break;

	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}

      if (code == OACC_DATA
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	remove = true;
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (groups)
    {
      delete grpmap;
      delete groups;
    }

  ctx->clauses = *orig_list_p;
  gimplify_omp_ctxp = ctx;
}

/* Return true if DECL is a candidate for shared to firstprivate
   optimization.  We only consider non-addressable scalars, not
   too big, and not references.  */

static bool
omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
{
  if (TREE_ADDRESSABLE (decl))
    return false;
  tree type = TREE_TYPE (decl);
  if (!is_gimple_reg_type (type)
      || TREE_CODE (type) == REFERENCE_TYPE
      || TREE_ADDRESSABLE (type))
    return false;
  /* Don't optimize too large decls, as each thread/task will have
     its own.  */
  HOST_WIDE_INT len = int_size_in_bytes (type);
  if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
    return false;
  if (omp_privatize_by_reference (decl))
    return false;
  return true;
}
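
/* As a hedged illustration: a plain "int x" that is never address-taken
   qualifies, while an array, a reference, or any decl whose address is
   taken does not; with 64-bit pointers the size cutoff above works out to
   4 * 8 = 32 bytes.  */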

/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
   For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
   GOVD_WRITTEN in outer contexts.  */

static void
omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
{
  for (; ctx; ctx = ctx->outer_context)
    {
      splay_tree_node n = splay_tree_lookup (ctx->variables,
					     (splay_tree_key) decl);
      if (n == NULL)
	continue;
      else if (n->value & GOVD_SHARED)
	{
	  n->value |= GOVD_WRITTEN;
	  return;
	}
      else if (n->value & GOVD_DATA_SHARE_CLASS)
	return;
    }
}

/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  *walk_subtrees = 0;
  if (!wi->is_lhs)
    return NULL_TREE;

  tree op = *tp;
  do
    {
      if (handled_component_p (op))
	op = TREE_OPERAND (op, 0);
      else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
      else
	break;
    }
  while (1);
  if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
    return NULL_TREE;

  omp_mark_stores (gimplify_omp_ctxp, op);
  return NULL_TREE;
}

/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
		      bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  switch (gimple_code (stmt))
    {
    /* Don't recurse on OpenMP constructs for which
       gimplify_adjust_omp_clauses already handled the bodies,
       except handle gimple_omp_for_pre_body.  */
    case GIMPLE_OMP_FOR:
      *handled_ops_p = true;
      if (gimple_omp_for_pre_body (stmt))
	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
			 omp_find_stores_stmt, omp_find_stores_op, wi);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_CRITICAL:
      *handled_ops_p = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;
  gimple_seq *pre_p;
};

/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
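
/* For example (an illustrative note): a shared variable that is visible in
   a parallel region but never referenced there gets a GOVD_* entry without
   GOVD_SEEN, so the callback below emits no clause for it; explicit
   clauses (GOVD_EXPLICIT) and simd-local temporaries (GOVD_LOCAL) are
   likewise skipped here and handled by gimplify_adjust_omp_clauses
   itself.  */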
static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
      && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
    flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
      if (VAR_P (decl)
	  && DECL_IN_CONSTANT_POOL (decl)
	  && !lookup_attribute ("omp declare target",
				DECL_ATTRIBUTES (decl)))
	{
	  tree id = get_identifier ("omp declare target");
	  DECL_ATTRIBUTES (decl)
	    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
	  varpool_node *node = varpool_node::get (decl);
	  if (node)
	    {
	      node->offloadable = 1;
	      if (ENABLE_OFFLOADING)
		g->have_offload = true;
	    }
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
      /* Don't optimize shared into firstprivate for read-only vars
	 on tasks with depend clause, we shouldn't try to copy them
	 until the dependencies are satisfied.  */
      if (gimplify_omp_ctxp->has_depend)
	flags |= GOVD_WRITTEN;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
    return 0;
  else if (flags & GOVD_CONDTEMP)
    {
      code = OMP_CLAUSE__CONDTEMP_;
      gimple_add_tmp_var (decl);
    }
  else
    gcc_unreachable ();
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_fold_indirect_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      tree dtype = TREE_TYPE (decl);
      if (TREE_CODE (dtype) == REFERENCE_TYPE)
	dtype = TREE_TYPE (dtype);
      /* FIRSTPRIVATE_POINTER doesn't work well if we have a
	 multiply-indirected pointer.  If we have a reference to a pointer to
	 a pointer, it's possible that this should really be
	 GOMP_MAP_FIRSTPRIVATE_REFERENCE -- but that also doesn't work at the
	 moment, so stick with this.  (See PR113279 and testcases
	 baseptrs-{4,6}.C:ref2ptrptr_offset_decl_member_slice).  */
      if (TREE_CODE (dtype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (dtype)) == POINTER_TYPE)
	OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
      else
	OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT
		       | GOVD_MAP_ALLOC_ONLY
		       | GOVD_MAP_FROM_ONLY))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_FROM_ONLY:
	  kind = GOMP_MAP_FROM;
	  break;
	case GOVD_MAP_ALLOC_ONLY:
	  kind = GOMP_MAP_ALLOC;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      /* Setting of the implicit flag for the runtime is currently disabled
	 for OpenACC.  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
	OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (INDIRECT_REF_P (decl2));
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && omp_privatize_by_reference (decl))
	{
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
      if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
	{
	  tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
	  OMP_CLAUSE_CHAIN (nc) = chain;
	  OMP_CLAUSE_CHAIN (clause) = nc;
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  lang_hooks.decls.omp_finish_clause (nc, pre_p,
					      (ctx->region_type & ORT_ACC) != 0);
	  gimplify_omp_ctxp = ctx;
	}
      else
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
	     in simd.  Those are only added for the local vars inside of simd body
	     and they don't need to be e.g. default constructible.  */
	  if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
	    lang_hooks.decls.omp_finish_clause (clause, pre_p,
						(ctx->region_type & ORT_ACC) != 0);
	  if (gimplify_omp_ctxp)
	    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
		  && DECL_P (OMP_CLAUSE_SIZE (clause)))
		omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
				     true);
	  gimplify_omp_ctxp = ctx;
	}
      *list_p = clause;
      return 0;
    }

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree *orig_list_p = list_p;
  tree c, decl;
  bool has_inscan_reductions = false;
  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }
  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of simd loop, prevent
	 vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }
  if (ctx->region_type == ORT_WORKSHARE
      && ctx->outer_context
      && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
    {
      for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  {
	    decl = OMP_CLAUSE_DECL (c);
	    splay_tree_node n
	      = splay_tree_lookup (ctx->outer_context->variables,
				   (splay_tree_key) decl);
	    gcc_checking_assert (!splay_tree_lookup (ctx->variables,
						     (splay_tree_key) decl));
	    omp_add_variable (ctx, decl, n->value);
	    tree c2 = copy_node (c);
	    OMP_CLAUSE_CHAIN (c2) = *list_p;
	    *list_p = c2;
	    if ((n->value & GOVD_FIRSTPRIVATE) == 0)
	      continue;
	    c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				   OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c2) = decl;
	    OMP_CLAUSE_CHAIN (c2) = *list_p;
	    *list_p = c2;
	  }
    }
  if (code == OMP_TARGET
      || code == OMP_TARGET_DATA
      || code == OMP_TARGET_ENTER_DATA
      || code == OMP_TARGET_EXIT_DATA)
    {
      vec<omp_mapping_group> *groups;
      groups = omp_gather_mapping_groups (list_p);
      hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
      if (groups)
	{
	  grpmap = omp_index_mapping_groups (groups);
	  omp_resolve_clause_dependencies (code, groups, grpmap);
	  omp_build_struct_sibling_lists (code, ctx->region_type, groups,
					  &grpmap, NULL);
	  omp_mapping_group *outlist = NULL;
	  delete grpmap;
	  delete groups;
	  /* Rebuild now we have struct sibling lists.  */
	  groups = omp_gather_mapping_groups (list_p);
	  grpmap = omp_index_mapping_groups (groups);
	  bool enter_exit = (code == OMP_TARGET_ENTER_DATA
			     || code == OMP_TARGET_EXIT_DATA);
	  outlist = omp_tsort_mapping_groups (groups, grpmap, enter_exit);
	  outlist = omp_segregate_mapping_groups (outlist);
	  list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
	  delete grpmap;
	  delete groups;
	}
    }
  else if (ctx->region_type & ORT_ACC)
    {
      vec<omp_mapping_group> *groups;
      groups = omp_gather_mapping_groups (list_p);
      if (groups)
	{
	  hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
	  grpmap = omp_index_mapping_groups (groups);
	  oacc_resolve_clause_dependencies (groups, grpmap);
	  omp_build_struct_sibling_lists (code, ctx->region_type, groups,
					  &grpmap, NULL);
	  delete groups;
	  delete grpmap;
	}
    }
  tree attach_list = NULL_TREE;
  tree *attach_tail = &attach_list;

  tree *grp_start_p = NULL, grp_end = NULL_TREE;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;
      bool move_attach = false;

      if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
	grp_end = NULL_TREE;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
			      (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_MAP) != 0)
		{
		  remove = true;
		  break;
		}
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
	      && code == OMP_PARALLEL
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	    remove = true;
	  if (!remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && ctx->has_depend
		  && DECL_P (decl))
		n->value |= GOVD_WRITTEN;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  else
	    n->value &= ~GOVD_EXPLICIT;
	  break;
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
	    remove = true;
	  break;
	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (INDIRECT_REF_P (decl)
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = n == NULL || !(n->value & GOVD_SEEN);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = n == NULL || !(n->value & GOVD_SEEN);
	  break;
	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!grp_end)
	    {
	      grp_start_p = list_p;
	      grp_end = *omp_group_last (grp_start_p);
	    }
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_PRESENT_ALLOC:
	    case GOMP_MAP_PRESENT_TO:
	    case GOMP_MAP_PRESENT_FROM:
	    case GOMP_MAP_PRESENT_TOFROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
	      break;
	    default:
	      break;
	    }
	  switch (code)
	    {
	    case OACC_DATA:
	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
		break;
	      /* FALLTHRU */
	    case OACC_HOST_DATA:
	    case OACC_ENTER_DATA:
	    case OACC_EXIT_DATA:
	    case OMP_TARGET_DATA:
	    case OMP_TARGET_ENTER_DATA:
	    case OMP_TARGET_EXIT_DATA:
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		/* For target {,enter ,exit }data only the array slice is
		   mapped, but not the pointer to it.  */
		remove = true;
	      if (code == OMP_TARGET_EXIT_DATA
		  && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
		      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER))
		remove = true;
	      break;
	    default:
	      break;
	    }
	  if (remove)
	    break;
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    {
	      /* Sanity check: attach/detach map kinds use the size as a bias,
		 and it's never right to use the decl size for such
		 mappings.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
			  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
			  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH
			  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
			  && (OMP_CLAUSE_MAP_KIND (c)
			      != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
	      OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
					: TYPE_SIZE_UNIT (TREE_TYPE (decl));
	    }
	  gimplify_omp_ctxp = ctx->outer_context;
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      gimplify_omp_ctxp = ctx;
	      remove = true;
	      break;
	    }
	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		    || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
	    {
	      OMP_CLAUSE_SIZE (c)
		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
					   false);
	      if ((ctx->region_type & ORT_TARGET) != 0)
		omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
				  GOVD_FIRSTPRIVATE | GOVD_SEEN);
	    }
	  gimplify_omp_ctxp = ctx;
	  /* Data clauses associated with reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to %<present_or_copy%>",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
	      && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
	    {
	      remove = true;
	      break;
	    }
	  /* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
	     a variable captured in a lambda closure), look through that now
	     before the DECL_P check below.  (A code other than COMPONENT_REF,
	     i.e. INDIRECT_REF, will be a VLA/variable-length array
	     section.  A global var may be a variable in a common block.  We
	     don't want to do this here for either of those.)  */
	  if ((ctx->region_type & ORT_ACC) == 0
	      && DECL_P (decl)
	      && !is_global_var (decl)
	      && DECL_HAS_VALUE_EXPR_P (decl)
	      && TREE_CODE (DECL_VALUE_EXPR (decl)) == COMPONENT_REF)
	    decl = OMP_CLAUSE_DECL (c) = DECL_VALUE_EXPR (decl);
	  if (TREE_CODE (decl) == TARGET_EXPR)
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
				 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
		remove = true;
	    }
	  else if (!DECL_P (decl))
	    {
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (INDIRECT_REF_P (decl)
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		  break;
		}

	      tree d = decl, *pd;
	      if (TREE_CODE (d) == ARRAY_REF)
		{
		  while (TREE_CODE (d) == ARRAY_REF)
		    d = TREE_OPERAND (d, 0);
		  if (TREE_CODE (d) == COMPONENT_REF
		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
		    decl = d;
		}
	      pd = &OMP_CLAUSE_DECL (c);
	      if (d == decl
		  && TREE_CODE (decl) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
		      == REFERENCE_TYPE)
		  && (OMP_CLAUSE_MAP_KIND (c)
		      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
		{
		  pd = &TREE_OPERAND (decl, 0);
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		switch (code)
		  {
		  case OACC_ENTER_DATA:
		  case OACC_EXIT_DATA:
		    if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
			== ARRAY_TYPE)
		      remove = true;
		    else if (code == OACC_ENTER_DATA)
		      goto change_to_attach;
		    /* FALLTHRU */
		  case OMP_TARGET_EXIT_DATA:
		    OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DETACH);
		    break;
		  case OACC_UPDATE:
		    /* An "attach/detach" operation on an update directive
		       should behave as a GOMP_MAP_ALWAYS_POINTER.  Note that
		       both GOMP_MAP_ATTACH_DETACH and GOMP_MAP_ALWAYS_POINTER
		       kinds depend on the previous mapping (for non-TARGET
		       regions).  */
		    OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
		    break;
		  default:
		  change_to_attach:
		    OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ATTACH);
		    if ((ctx->region_type & ORT_TARGET) != 0)
		      move_attach = true;
		  }
	      else if ((ctx->region_type & ORT_TARGET) != 0
		       && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
			   || (OMP_CLAUSE_MAP_KIND (c)
			       == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
		move_attach = true;

	      /* If we have e.g. map(struct: *var), don't gimplify the
		 argument since omp-low.cc wants to see the decl itself.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		break;

	      /* We've already partly gimplified this in
		 gimplify_scan_omp_clauses.  Don't do any more.  */
	      if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
		break;

	      gimplify_omp_ctxp = ctx->outer_context;
	      if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
				 fb_lvalue) == GS_ERROR)
		remove = true;
	      gimplify_omp_ctxp = ctx;
	      break;
	    }
	  if ((code == OMP_TARGET
	       || code == OMP_TARGET_DATA
	       || code == OMP_TARGET_ENTER_DATA
	       || code == OMP_TARGET_EXIT_DATA)
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
	    {
	      bool firstprivatize = false;

	      for (struct gimplify_omp_ctx *octx = ctx->outer_context; octx;
		   octx = octx->outer_context)
		{
		  splay_tree_node n
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) OMP_CLAUSE_DECL (c));
		  /* If this is contained in an outer OpenMP region as a
		     firstprivate value, remove the attach/detach.  */
		  if (n && (n->value & GOVD_FIRSTPRIVATE))
		    {
		      firstprivatize = true;
		      break;
		    }
		}

	      enum gomp_map_kind map_kind;
	      if (firstprivatize)
		map_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
	      else if (code == OMP_TARGET_EXIT_DATA)
		map_kind = GOMP_MAP_DETACH;
	      else
		map_kind = GOMP_MAP_ATTACH;
	      OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
	    }
	  else if ((ctx->region_type & ORT_ACC) != 0
		   && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
	    {
	      enum gomp_map_kind map_kind = (code == OACC_EXIT_DATA
					     ? GOMP_MAP_DETACH
					     : GOMP_MAP_ATTACH);
	      OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (DECL_SIZE (decl)
		   && !poly_int_tree_p (DECL_SIZE (decl))
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (INDIRECT_REF_P (decl2));
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  /* If we have a target region, we can push all the attaches to the
	     end of the list (we may have standalone "attach" operations
	     synthesized for GOMP_MAP_STRUCT nodes that must be processed after
	     the attachment point AND the pointed-to block have been mapped).
	     If we have something else, e.g. "enter data", we need to keep
	     "attach" nodes together with the previous node they attach to so
	     that separate "exit data" operations work properly (see
	     libgomp/target.c).  */
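	  /* A hypothetical illustration (not from the original sources): for
	     "#pragma omp target map(tofrom: s.p[0:n])" the mapping group can
	     hold a GOMP_MAP_STRUCT node for "s", the mapping of the array
	     section, and a synthesized GOMP_MAP_ATTACH for "s.p"; only on
	     "target" is it safe to defer that attach node to the list
	     tail.  */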
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
	    move_attach = true;
	  break;
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (INDIRECT_REF_P (decl2));
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qD specified in %<inscan%> %<reduction%> clause "
			    "but not in %<scan%> directive clause", decl);
		  break;
		}
	      has_inscan_reductions = true;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Emit error when the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			    "reduction on %qE", DECL_NAME (decl));
		}
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p,
						      (ctx->region_type
						       & ORT_ACC) != 0);
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	case OMP_CLAUSE_ALLOCATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (n != NULL && !(n->value & GOVD_SEEN))
	    {
	      if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
		  != 0
		  && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
		remove = true;
	    }
	  if (!remove
	      && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
	      && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
	      && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
		  || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
		  || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
	    {
	      tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
	      if (n == NULL)
		{
		  enum omp_clause_default_kind default_kind
		    = ctx->default_kind;
		  ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				       true);
		  ctx->default_kind = default_kind;
		}
	      else
		omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				     true);
	    }
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_SELF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DOACROSS:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  break;

	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else if (move_attach)
	{
	  /* Remove attach node from here, separate out into its own list.  */
	  *attach_tail = c;
	  *list_p = OMP_CLAUSE_CHAIN (c);
	  OMP_CLAUSE_CHAIN (c) = NULL_TREE;
	  attach_tail = &OMP_CLAUSE_CHAIN (c);
	}
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Splice attach nodes at the end of the list.  */
  if (attach_list)
    {
      *list_p = attach_list;
      list_p = attach_tail;
    }
  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
    {
      /* OpenMP.  Implicit clauses are added at the start of the clause list,
	 but after any non-map clauses.  */
      tree *implicit_add_list_p = orig_list_p;
      while (*implicit_add_list_p
	     && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
	implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
      data.list_p = implicit_add_list_p;
    }
  else
    /* OpenACC.  */
    data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
  if (has_inscan_reductions)
    for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<inscan%> %<reduction%> clause used together with "
		    "%<linear%> clause for a variable other than loop "
		    "iterator");
	  break;
	}

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
   -1 if unknown yet (simd is involved, won't be known until vectorization)
   and 1 if they do.  If SCORES is non-NULL, it should point to an array
   of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
   of the CONSTRUCTS (position -1 if it will never match) followed by
   number of constructs in the OpenMP context construct trait.  If the
   score depends on whether it will be in a declare simd clone or not,
   the function returns 2 and there will be two sets of the scores, the first
   one for the case that it is not in a declare simd clone, the other
   that it is in a declare simd clone.  */
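/* Illustrative example (assumed, not part of the original sources): with
   CONSTRUCTS = { OMP_TARGET, OMP_PARALLEL } evaluated inside
   "#pragma omp target parallel", SCORES could be filled as { 0, 1, 2 }:
   the positions of "target" and "parallel" in the context construct trait,
   followed by the trait's length.  A construct that can never match gets
   position -1.  */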
int
omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
				int *scores)
{
  int matched = 0, cnt = 0;
  bool simd_seen = false;
  bool target_seen = false;
  int declare_simd_cnt = -1;
  auto_vec<enum tree_code, 16> codes;
  for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
    {
      if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
	  || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
	      == ORT_TARGET && ctx->code == OMP_TARGET)
	  || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
	  || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
	  || (ctx->region_type == ORT_SIMD
	      && ctx->code == OMP_SIMD
	      && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (ctx->code);
	  else if (matched < nconstructs && ctx->code == constructs[matched])
	    {
	      if (ctx->code == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	  if (ctx->code == OMP_TARGET)
	    {
	      if (scores == NULL)
		return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
	      target_seen = true;
	      break;
	    }
	}
      else if (ctx->region_type == ORT_WORKSHARE
	       && ctx->code == OMP_LOOP
	       && ctx->outer_context
	       && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
	       && ctx->outer_context->outer_context
	       && ctx->outer_context->outer_context->code == OMP_LOOP
	       && ctx->outer_context->outer_context->distribute)
	ctx = ctx->outer_context->outer_context;
      ctx = ctx->outer_context;
    }
  if (!target_seen
      && lookup_attribute ("omp declare simd",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Declare simd is a maybe case, it is supposed to be added only to the
	 omp-simd-clone.cc added clones and not to the base function.  */
      declare_simd_cnt = cnt++;
      if (scores)
	codes.safe_push (OMP_SIMD);
      else if (cnt == 1
	       && constructs[0] == OMP_SIMD)
	{
	  gcc_assert (matched == 0);
	  simd_seen = true;
	  if (++matched == nconstructs)
	    return -1;
	}
    }
  if (tree attr = lookup_attribute ("omp declare variant variant",
				    DECL_ATTRIBUTES (current_function_decl)))
    {
      tree selectors = TREE_VALUE (attr);
      int variant_nconstructs = list_length (selectors);
      enum tree_code *variant_constructs = NULL;
      if (!target_seen && variant_nconstructs)
	{
	  variant_constructs
	    = (enum tree_code *) alloca (variant_nconstructs
					 * sizeof (enum tree_code));
	  omp_construct_traits_to_codes (selectors, variant_nconstructs,
					 variant_constructs);
	}
      for (int i = 0; i < variant_nconstructs; i++)
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (variant_constructs[i]);
	  else if (matched < nconstructs
		   && variant_constructs[i] == constructs[matched])
	    {
	      if (variant_constructs[i] == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	}
    }
  if (!target_seen
      && lookup_attribute ("omp declare target block",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      if (scores)
	codes.safe_push (OMP_TARGET);
      else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
	++matched;
    }
  if (scores)
    {
      for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
	{
	  int j = codes.length () - 1;
	  for (int i = nconstructs - 1; i >= 0; i--)
	    {
	      while (j >= 0
		     && (pass != 0 || declare_simd_cnt != j)
		     && constructs[i] != codes[j])
		--j;
	      if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
		*scores++ = j - 1;
	      else
		*scores++ = j;
	    }
	  *scores++ = ((pass == 0 && declare_simd_cnt != -1)
		       ? codes.length () - 1 : codes.length ());
	}
      return declare_simd_cnt == -1 ? 1 : 2;
    }
  if (matched == nconstructs)
    return simd_seen ? -1 : 1;
  return 0;
}
/* Gimplify OACC_CACHE.  */

static void
gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;

  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
			     OACC_CACHE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
			       OACC_CACHE);

  /* TODO: Do something sensible with this information.  */

  *expr_p = NULL_TREE;
}
/* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
   if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
   kind.  The entry kind will replace the one in CLAUSE, while the exit
   kind will be used in a new omp_clause and returned to the caller.  */
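/* For instance, as the switch below shows, a GOMP_MAP_TOFROM clause is
   rewritten to GOMP_MAP_TO for the entry point and the helper returns a
   fresh GOMP_MAP_FROM clause for use at the exit point.  */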
static tree
gimplify_oacc_declare_1 (tree clause)
{
  HOST_WIDE_INT kind, new_op;
  bool ret = false;
  tree c = NULL;

  kind = OMP_CLAUSE_MAP_KIND (clause);

  switch (kind)
    {
      case GOMP_MAP_ALLOC:
	new_op = GOMP_MAP_RELEASE;
	ret = true;
	break;

      case GOMP_MAP_FROM:
	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
	new_op = GOMP_MAP_FROM;
	ret = true;
	break;

      case GOMP_MAP_TOFROM:
	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
	new_op = GOMP_MAP_FROM;
	ret = true;
	break;

      case GOMP_MAP_DEVICE_RESIDENT:
      case GOMP_MAP_FORCE_DEVICEPTR:
      case GOMP_MAP_FORCE_PRESENT:
      case GOMP_MAP_LINK:
      case GOMP_MAP_POINTER:
      case GOMP_MAP_TO:
	break;

      default:
	gcc_unreachable ();
	break;
    }

  if (ret)
    {
      c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, new_op);
      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);

      return c;
    }

  return NULL_TREE;
}
/* Gimplify OACC_DECLARE.  */

static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */
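/* Illustration (assumed example): in "int a[n]; #pragma omp parallel ...",
   the VLA "a" is decomposed during gimplification into a pointer
   dereference, so its implicit data sharing must be recorded before the
   body is lowered.  */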
static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
			     OMP_PARALLEL_COMBINED (expr)
			     ? ORT_COMBINED_PARALLEL
			     : ORT_PARALLEL, OMP_PARALLEL);

  push_gimplify_context ();

  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
			       OMP_PARALLEL);

  g = gimple_build_omp_parallel (body,
				 OMP_PARALLEL_CLAUSES (expr),
				 NULL_TREE, NULL_TREE);
  if (OMP_PARALLEL_COMBINED (expr))
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
/* Gimplify the contents of an OMP_TASK statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */
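/* Note (illustrative): a body-less "#pragma omp taskwait depend(in: x)
   nowait" also reaches here as an OMP_TASK whose OMP_TASK_BODY is
   NULL_TREE; the depend/nowait diagnostics below handle that form.  */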
static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;
  bool nowait = false;
  bool has_depend = false;

  if (OMP_TASK_BODY (expr) == NULL_TREE)
    {
      for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
	  {
	    has_depend = true;
	    if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<mutexinoutset%> kind in %<depend%> clause on a "
			  "%<taskwait%> construct");
		break;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
	  nowait = true;
      if (nowait && !has_depend)
	{
	  error_at (EXPR_LOCATION (expr),
		    "%<taskwait%> construct with %<nowait%> clause but no "
		    "%<depend%> clauses");
	  *expr_p = NULL_TREE;
	  return;
	}
    }

  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
			     omp_find_clause (OMP_TASK_CLAUSES (expr),
					      OMP_CLAUSE_UNTIED)
			     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);

  if (OMP_TASK_BODY (expr))
    {
      push_gimplify_context ();

      g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
			       OMP_TASK);

  g = gimple_build_omp_task (body,
			     OMP_TASK_CLAUSES (expr),
			     NULL_TREE, NULL_TREE,
			     NULL_TREE, NULL_TREE, NULL_TREE);
  if (OMP_TASK_BODY (expr) == NULL_TREE)
    gimple_omp_task_set_taskwait_p (g, true);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
/* Helper function for gimplify_omp_for.  If *TP is not a gimple constant,
   force it into a temporary initialized in PRE_P and add firstprivate clause
   to ORIG_FOR_STMT.  */
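/* E.g. (illustrative) for a taskloop whose bound is the non-constant
   expression "n * 2", the expression is evaluated into a temporary ahead
   of the taskloop and that temporary is made firstprivate on it, so the
   construct sees an invariant bound.  */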
static void
gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
			    tree orig_for_stmt)
{
  if (*tp == NULL || is_gimple_constant (*tp))
    return;

  if (TREE_CODE (*tp) == SAVE_EXPR)
    gimplify_save_expr (tp, pre_p, NULL);
  *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
  /* Reference to pointer conversion is considered useless,
     but is significant for firstprivate clause.  Force it
     here.  */
  if (type
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
    {
      tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
      tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
      gimplify_and_add (m, pre_p);
      *tp = v;
    }

  tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
  OMP_CLAUSE_DECL (c) = *tp;
  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
  OMP_FOR_CLAUSES (orig_for_stmt) = c;
}
/* Helper function of gimplify_omp_for, find OMP_ORDERED with
   null OMP_ORDERED_BODY inside of OMP_FOR's body.  */
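/* E.g. a stand-alone "#pragma omp ordered depend(sink: i - 1)" (doacross
   form) has a null body and is what this callback detects.  */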
static tree
find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
{
  switch (TREE_CODE (*tp))
    {
    case OMP_ORDERED:
      if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
	return *tp;
      break;
    case OMP_SIMD:
    case OMP_PARALLEL:
    case OMP_TARGET:
      *walk_subtrees = 0;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Gimplify standalone loop transforming directive which has the
   transformations applied already.  So, all that is needed is gimplify
   the remaining loops as normal loops.  */
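/* Sketch of the per-loop lowering done below (illustrative):
       init;  goto l2;
     l1: body; incr;
     l2: if (cond) goto l1; else goto l3;
     l3: ;
   i.e. each remaining loop becomes a plain bottom-test loop built from
   gotos and labels.  */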
static enum gimplify_status
gimplify_omp_loop_xform (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;

  if (OMP_FOR_PRE_BODY (for_stmt))
    gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), pre_p);

  gimple_seq pre_body = NULL, post_body = NULL;
  for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      if (TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i) == NULL_TREE)
	continue;
      tree iters = NULL_TREE;
      if (i == 0
	  && TREE_CODE (for_stmt) == OMP_UNROLL
	  && !omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_PARTIAL))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_FULL))
	    iters = omp_loop_number_of_iterations (for_stmt, 0, NULL);
	  else
	    iters = build_int_cst (integer_type_node, 8);
	}
      tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      tree decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (DECL_ARTIFICIAL (decl)
	  && TREE_PRIVATE (t)
	  && gimplify_omp_ctxp
	  && gimplify_omp_ctxp->region_type != ORT_NONE)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  do
	    {
	      splay_tree_node n
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      else if (ctx->region_type != ORT_WORKSHARE
		       && ctx->region_type != ORT_TASKGROUP
		       && ctx->region_type != ORT_SIMD
		       && ctx->region_type != ORT_ACC
		       && !(ctx->region_type & ORT_TARGET_DATA))
		{
		  omp_add_variable (ctx, decl, GOVD_PRIVATE);
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  while (ctx);
	}
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  gcc_assert (seen_error ());
	  continue;
	}
      gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
		     fb_rvalue);
      gimplify_and_add (t, &pre_body);
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (TREE_OPERAND (t, 0) == decl);
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  gcc_assert (seen_error ());
	  continue;
	}
      gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
		     fb_rvalue);
      tree l1 = create_artificial_label (UNKNOWN_LOCATION);
      tree l2 = create_artificial_label (UNKNOWN_LOCATION);
      tree l3 = create_artificial_label (UNKNOWN_LOCATION);
      gimplify_seq_add_stmt (&pre_body, gimple_build_goto (l2));
      gimplify_seq_add_stmt (&pre_body, gimple_build_label (l1));
      gimple_seq this_post_body = NULL;
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      if (TREE_CODE (t) == MODIFY_EXPR)
	{
	  t = TREE_OPERAND (t, 1);
	  if (TREE_CODE (t) == PLUS_EXPR
	      && TREE_OPERAND (t, 1) == decl)
	    {
	      TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
	      TREE_OPERAND (t, 0) = decl;
	    }
	  gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
			 fb_rvalue);
	}
      gimplify_and_add (TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i),
			&this_post_body);
      gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l2));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcond *cond = NULL;
      tree d = decl;
      gimplify_expr (&d, &this_post_body, NULL, is_gimple_val, fb_rvalue);
      if (iters && tree_fits_uhwi_p (iters))
	{
	  unsigned HOST_WIDE_INT niters = tree_to_uhwi (iters);
	  if ((unsigned HOST_WIDE_INT) (int) niters == niters
	      && (int) niters > 0)
	    {
	      t = build2 (TREE_CODE (t), boolean_type_node, d,
			  TREE_OPERAND (t, 1));
	      t = build3 (ANNOTATE_EXPR, TREE_TYPE (t), t,
			  build_int_cst (integer_type_node,
					 annot_expr_unroll_kind),
			  build_int_cst (integer_type_node, niters));
	      gimplify_expr (&t, &this_post_body, NULL, is_gimple_val,
			     fb_rvalue);
	      cond = gimple_build_cond (NE_EXPR, t, boolean_false_node,
					l1, l3);
	    }
	}
      if (cond == NULL)
	cond = gimple_build_cond (TREE_CODE (t), d, TREE_OPERAND (t, 1),
				  l1, l3);
      gimplify_seq_add_stmt (&this_post_body, cond);
      gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l3));
      gimplify_seq_add_seq (&this_post_body, post_body);
      post_body = this_post_body;
    }
  gimplify_seq_add_seq (pre_p, pre_body);
  gimplify_and_add (OMP_FOR_BODY (for_stmt), pre_p);
  gimplify_seq_add_seq (pre_p, post_body);

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Gimplify the gross structure of an OMP_FOR statement.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gomp_for *gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bitmap has_decl_expr = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;
  bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;

  orig_for_stmt = for_stmt = *expr_p;

  bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
		 != NULL_TREE);
) == NULL_TREE
)
15368 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
15369 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
15370 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
15371 find_combined_omp_for
, data
, NULL
);
15372 if (inner_for_stmt
== NULL_TREE
)
15374 gcc_assert (seen_error ());
15375 *expr_p
= NULL_TREE
;
15378 gcc_assert (inner_for_stmt
== *data
[3]);
15379 omp_maybe_apply_loop_xforms (data
[3],
15381 ? OMP_FOR_CLAUSES (*data
[2])
15382 : TREE_CODE (for_stmt
) == OMP_FOR
15383 ? OMP_FOR_CLAUSES (for_stmt
)
15385 if (inner_for_stmt
!= *data
[3])
15387 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
15389 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
15390 &OMP_FOR_PRE_BODY (for_stmt
));
15391 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
15393 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
15395 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
15396 &OMP_FOR_PRE_BODY (for_stmt
));
15397 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
15402 /* We have some statements or variable declarations in between
15403 the composite construct directives. Move them around the
15406 for (i
= 0; i
< 3; i
++)
15410 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
15411 data
[i
+ 1] = data
[i
];
15412 *data
[i
] = OMP_BODY (t
);
15413 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
15414 NULL_TREE
, make_node (BLOCK
));
15415 OMP_BODY (t
) = body
;
15416 append_to_statement_list_force (inner_for_stmt
,
15417 &BIND_EXPR_BODY (body
));
15419 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
15420 gcc_assert (*data
[3] == inner_for_stmt
);
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
	if (!loop_p
	    && OMP_FOR_ORIG_DECLS (inner_for_stmt)
	    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					i)) == TREE_LIST
	    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					   i)))
	  {
	    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
	    /* Class iterators aren't allowed on OMP_SIMD, so the only
	       case we need to solve is distribute parallel for.  They are
	       allowed on the loop construct, but that is already handled
	       in gimplify_omp_loop.  */
	    gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
			&& TREE_CODE (for_stmt) == OMP_DISTRIBUTE
			&& data[1]);
	    tree orig_decl = TREE_PURPOSE (orig);
	    tree last = TREE_VALUE (orig);
	    tree *pc;
	    for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
		 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
	      if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
		   || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
		  && OMP_CLAUSE_DECL (*pc) == orig_decl)
		break;
	    if (*pc == NULL_TREE)
	      {
		tree *spc;
		for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
		     *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
		  if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
		      && OMP_CLAUSE_DECL (*spc) == orig_decl)
		    break;
		if (*spc)
		  {
		    tree c = *spc;
		    *spc = OMP_CLAUSE_CHAIN (c);
		    OMP_CLAUSE_CHAIN (c) = NULL_TREE;
		    *pc = c;
		  }
	      }
	    if (*pc == NULL_TREE)
	      ;
	    else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
	      {
		/* private clause will appear only on inner_for_stmt.
		   Change it into firstprivate, and add private clause
		   on for_stmt.  */
		tree c = copy_node (*pc);
		OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
		OMP_FOR_CLAUSES (for_stmt) = c;
		OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
		lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
	      }
	    else
	      {
		/* lastprivate clause will appear on both inner_for_stmt
		   and for_stmt.  Add firstprivate clause to
		   inner_for_stmt.  */
		tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
					   OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
		OMP_CLAUSE_CHAIN (c) = *pc;
		*pc = c;
		lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
	      }
	    tree c = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c) = last;
	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
	    c = build_omp_clause (UNKNOWN_LOCATION,
				  *pc ? OMP_CLAUSE_SHARED
				      : OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c) = orig_decl;
	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
	  }
      /* Similarly, take care of C++ range for temporaries, those should
	 be firstprivate on OMP_PARALLEL if any.  */
      if (data[1])
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
	  if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
	      && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					  i)) == TREE_LIST
	      && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					   i)))
	    {
	      tree orig
		= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
	      tree v = TREE_CHAIN (orig);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      /* First add firstprivate clause for the __for_end artificial
		 decl.  */
	      OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
	      if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		  == REFERENCE_TYPE)
		OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
	      OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	      OMP_PARALLEL_CLAUSES (*data[1]) = c;
	      if (TREE_VEC_ELT (v, 0))
		{
		  /* And now the same for __for_range artificial decl if it
		     exists.  */
		  c = build_omp_clause (UNKNOWN_LOCATION,
					OMP_CLAUSE_FIRSTPRIVATE);
		  OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
		  if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		      == REFERENCE_TYPE)
		    OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
		  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
		  OMP_PARALLEL_CLAUSES (*data[1]) = c;
		}
	    }
      break;
    }

  if (OMP_FOR_INIT (for_stmt) != NULL_TREE)
    {
      omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
      if (*expr_p != for_stmt)
	return GS_OK;
    }

  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_SCHEDULE))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "schedule", lang_GNU_Fortran () ? "do" : "for");
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "ordered", lang_GNU_Fortran () ? "do" : "for");
	}
      break;
    case OMP_DISTRIBUTE:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
	  && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			      OMP_CLAUSE_DIST_SCHEDULE))
	error_at (EXPR_LOCATION (for_stmt),
		  "%qs clause may not appear on non-rectangular %qs",
		  "dist_schedule", "distribute");
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_GRAINSIZE))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "grainsize", "taskloop");
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_NUM_TASKS))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "num_tasks", "taskloop");
	}
      if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
	ort = ORT_UNTIED_TASKLOOP;
      else
	ort = ORT_TASKLOOP;
      break;
    case OMP_SIMD:
      ort = ORT_SIMD;
      break;
    case OMP_TILE:
    case OMP_UNROLL:
      gcc_assert (inner_for_stmt == NULL_TREE);
      return gimplify_omp_loop_xform (expr_p, pre_p);
    default:
      gcc_unreachable ();
    }

  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && OMP_CLAUSE_DECL (c) == decl)
	  {
	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	    break;
	  }
    }
  if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
			       loop_p && TREE_CODE (for_stmt) != OMP_SIMD
			       ? OMP_LOOP : TREE_CODE (for_stmt));

  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;
  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  if ((ort == ORT_SIMD
       || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
      && OMP_FOR_PRE_BODY (for_stmt))
    {
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
	  && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
	{
	  t = OMP_FOR_PRE_BODY (for_stmt);
	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	}
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
	{
	  tree_stmt_iterator si;
	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
	       tsi_next (&si))
	    {
	      t = tsi_stmt (si);
	      if (TREE_CODE (t) == DECL_EXPR
		  && VAR_P (DECL_EXPR_DECL (t)))
		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	    }
	}
    }
  if (OMP_FOR_PRE_BODY (for_stmt))
    {
      if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
      else
	{
	  struct gimplify_omp_ctx ctx;
	  memset (&ctx, 0, sizeof (ctx));
	  ctx.region_type = ORT_NONE;
	  gimplify_omp_ctxp = &ctx;
	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
	  gimplify_omp_ctxp = NULL;
	}
    }
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    for_stmt = inner_for_stmt;
  /* For taskloop, need to gimplify the start, end and step before the
     taskloop, outside of the taskloop omp context.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	{
	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	  gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
				   ? pre_p : &for_pre_body);
	  tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree v = TREE_OPERAND (t, 1);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
					  for_pre_p, orig_for_stmt);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
					  for_pre_p, orig_for_stmt);
	    }
	  else
	    gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
					orig_for_stmt);

	  /* Handle OMP_FOR_COND.  */
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree v = TREE_OPERAND (t, 1);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
					  for_pre_p, orig_for_stmt);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
					  for_pre_p, orig_for_stmt);
	    }
	  else
	    gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
					orig_for_stmt);

	  /* Handle OMP_FOR_INCR.  */
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  if (TREE_CODE (t) == MODIFY_EXPR)
	    {
	      decl = TREE_OPERAND (t, 0);
	      t = TREE_OPERAND (t, 1);
	      tree *tp = &TREE_OPERAND (t, 1);
	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
		tp = &TREE_OPERAND (t, 0);

	      gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
					  orig_for_stmt);
	    }
	}

      gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
				 OMP_TASKLOOP);
    }

  if (orig_for_stmt != for_stmt)
    gimplify_omp_ctxp->combined_loop = true;
15734 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
15735 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
15736 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
15737 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
15739 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
15740 bool is_doacross
= false;
15741 if (c
&& walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt
),
15742 find_standalone_omp_ordered
, NULL
))
15744 OMP_CLAUSE_ORDERED_DOACROSS (c
) = 1;
15745 is_doacross
= true;
15746 int len
= TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
));
15747 gimplify_omp_ctxp
->loop_iter_var
.create (len
* 2);
15748 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
15749 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LINEAR
)
15751 error_at (OMP_CLAUSE_LOCATION (*pc
),
15752 "%<linear%> clause may not be specified together "
15753 "with %<ordered%> clause if stand-alone %<ordered%> "
15754 "construct is nested in it");
15755 *pc
= OMP_CLAUSE_CHAIN (*pc
);
15758 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15760 int collapse
= 1, tile
= 0;
15761 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
15763 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
15764 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
15766 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
15767 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
15768 hash_set
<tree
> *allocate_uids
= NULL
;
15771 allocate_uids
= new hash_set
<tree
>;
15772 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
15773 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
15774 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
                  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
        {
          if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
            {
              tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
              if (TREE_CODE (orig_decl) == TREE_LIST)
                {
                  orig_decl = TREE_PURPOSE (orig_decl);
                  if (!orig_decl)
                    orig_decl = decl;
                }
              gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
            }
          else
            gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
          gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
        }

      if (for_stmt == orig_for_stmt)
        {
          tree orig_decl = decl;
          if (OMP_FOR_ORIG_DECLS (for_stmt))
            {
              tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
              if (TREE_CODE (orig_decl) == TREE_LIST)
                {
                  orig_decl = TREE_PURPOSE (orig_decl);
                  if (!orig_decl)
                    orig_decl = decl;
                }
            }
          if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
            error_at (EXPR_LOCATION (for_stmt),
                      "threadprivate iteration variable %qD", orig_decl);
        }

      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
        {
          /* Preserve this information until we gimplify the inner simd.  */
          if (has_decl_expr
              && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
            TREE_PRIVATE (t) = 1;
        }
      else if (ort == ORT_SIMD)
        {
          splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
                                                 (splay_tree_key) decl);
          omp_is_private (gimplify_omp_ctxp, decl,
                          1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
                               != 1));
          if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
            {
              omp_notice_variable (gimplify_omp_ctxp, decl, true);
              if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
                for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                                                OMP_CLAUSE_LASTPRIVATE);
                     c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
                                               OMP_CLAUSE_LASTPRIVATE))
                  if (OMP_CLAUSE_DECL (c3) == decl)
                    {
                      warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
                                  "conditional %<lastprivate%> on loop "
                                  "iterator %qD ignored", decl);
                      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
                      n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
                    }
            }
          else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
            {
              c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
              OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
              unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
              if ((has_decl_expr
                   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
                  || TREE_PRIVATE (t))
                {
                  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
                  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
                }
              struct gimplify_omp_ctx *outer
                = gimplify_omp_ctxp->outer_context;
              if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
                {
                  if (outer->region_type == ORT_WORKSHARE
                      && outer->combined_loop)
                    {
                      n = splay_tree_lookup (outer->variables,
                                             (splay_tree_key)decl);
                      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
                        {
                          OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
                          flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
                        }
                      else
                        {
                          struct gimplify_omp_ctx *octx
                            = outer->outer_context;
                          if (octx
                              && octx->region_type == ORT_COMBINED_PARALLEL
                              && octx->outer_context
                              && (octx->outer_context->region_type
                                  == ORT_WORKSHARE)
                              && octx->outer_context->combined_loop)
                            {
                              octx = octx->outer_context;
                              n = splay_tree_lookup (octx->variables,
                                                     (splay_tree_key)decl);
                              if (n != NULL && (n->value & GOVD_LOCAL) != 0)
                                {
                                  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
                                  flags
                                    |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
                                }
                            }
                        }
                    }
                }

              OMP_CLAUSE_DECL (c) = decl;
              OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
              OMP_FOR_CLAUSES (for_stmt) = c;
              omp_add_variable (gimplify_omp_ctxp, decl, flags);
              if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
                omp_lastprivate_for_combined_outer_constructs (outer, decl,
                                                               true);
            }
          else
            {
              bool lastprivate
                = (!has_decl_expr
                   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
              if (TREE_PRIVATE (t))
                lastprivate = false;
              if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
                {
                  tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
                  if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
                    lastprivate = false;
                }

              struct gimplify_omp_ctx *outer
                = gimplify_omp_ctxp->outer_context;
              if (outer && lastprivate)
                omp_lastprivate_for_combined_outer_constructs (outer, decl,
                                                               true);

              c = build_omp_clause (input_location,
                                    lastprivate ? OMP_CLAUSE_LASTPRIVATE
                                                : OMP_CLAUSE_PRIVATE);
              OMP_CLAUSE_DECL (c) = decl;
              OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
              OMP_FOR_CLAUSES (for_stmt) = c;
              omp_add_variable (gimplify_omp_ctxp, decl,
                                (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
                                | GOVD_EXPLICIT | GOVD_SEEN);
              c = NULL_TREE;
            }
        }
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
        {
          omp_notice_variable (gimplify_omp_ctxp, decl, true);
          splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
                                                 (splay_tree_key) decl);
          if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
            for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                                            OMP_CLAUSE_LASTPRIVATE);
                 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
                                           OMP_CLAUSE_LASTPRIVATE))
              if (OMP_CLAUSE_DECL (c3) == decl)
                {
                  warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
                              "conditional %<lastprivate%> on loop "
                              "iterator %qD ignored", decl);
                  OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
                  n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
                }
        }
      else
        omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
         as an iteration counter.  This is valid, since DECL cannot be
         modified in the body of the loop.  Similarly for any iteration vars
         in simd with collapse > 1 where the iterator vars must be
         lastprivate.  And similarly for vars mentioned in allocate
         clauses.  */
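      /* Editorial sketch, not from the GCC sources: for

             #pragma omp simd collapse(2)

         over iterators i and j, fresh temporaries i.0 and j.0 (names
         hypothetical) are created below; the loop control then runs on the
         temporaries and the body starts with i = i.0 etc., so i and j
         themselves can be lastprivate without the loop depending on
         them.  */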
      if (orig_for_stmt != for_stmt)
        var = decl;
      else if (!is_gimple_reg (decl)
               || (ort == ORT_SIMD
                   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
               || (allocate_uids && allocate_uids->contains (decl)))
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          /* Make sure omp_add_variable is not called on it prematurely.
             We call it ourselves a few lines later.  */
          gimplify_omp_ctxp = NULL;
          var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
          gimplify_omp_ctxp = ctx;
          TREE_OPERAND (t, 0) = var;

          gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

          if (ort == ORT_SIMD
              && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
            {
              c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
              OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
              OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
              OMP_CLAUSE_DECL (c2) = var;
              OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
              OMP_FOR_CLAUSES (for_stmt) = c2;
              omp_add_variable (gimplify_omp_ctxp, var,
                                GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
              if (c == NULL_TREE)
                {
                  c = c2;
                  c2 = NULL_TREE;
                }
            }
          else
            omp_add_variable (gimplify_omp_ctxp, var,
                              GOVD_PRIVATE | GOVD_SEEN);
        }
      else
        var = decl;

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
        {
          tree lb = TREE_OPERAND (t, 1);
          tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
          ret = MIN (ret, tret);
          tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
        }
      else
        tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
                              is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
        return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
        {
          tree ub = TREE_OPERAND (t, 1);
          tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
          ret = MIN (ret, tret);
          tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
        }
      else
        tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
                              is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
        {
        case PREINCREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          {
            tree decl = TREE_OPERAND (t, 0);
            /* c_omp_for_incr_canonicalize_ptr() should have been
               called to massage things appropriately.  */
            gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

            if (orig_for_stmt != for_stmt)
              break;
            t = build_int_cst (TREE_TYPE (decl), 1);
            if (c)
              OMP_CLAUSE_LINEAR_STEP (c) = t;
            t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
            t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
            TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
            break;
          }

        case PREDECREMENT_EXPR:
        case POSTDECREMENT_EXPR:
          /* c_omp_for_incr_canonicalize_ptr() should have been
             called to massage things appropriately.  */
          gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
          if (orig_for_stmt != for_stmt)
            break;
          t = build_int_cst (TREE_TYPE (decl), -1);
          if (c)
            OMP_CLAUSE_LINEAR_STEP (c) = t;
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
          break;

        case MODIFY_EXPR:
          gcc_assert (TREE_OPERAND (t, 0) == decl);
          TREE_OPERAND (t, 0) = var;

          t = TREE_OPERAND (t, 1);
          switch (TREE_CODE (t))
            {
            case PLUS_EXPR:
              if (TREE_OPERAND (t, 1) == decl)
                {
                  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
                  TREE_OPERAND (t, 0) = var;
                  break;
                }

              /* Fallthru.  */
            case MINUS_EXPR:
            case POINTER_PLUS_EXPR:
              gcc_assert (TREE_OPERAND (t, 0) == decl);
              TREE_OPERAND (t, 0) = var;
              break;
            default:
              gcc_unreachable ();
            }

          gimplify_omp_ctxp->in_for_exprs = true;
          tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
          ret = MIN (ret, tret);
          if (c)
            {
              tree step = TREE_OPERAND (t, 1);
              tree stept = TREE_TYPE (decl);
              if (POINTER_TYPE_P (stept))
                stept = sizetype;
              step = fold_convert (stept, step);
              if (TREE_CODE (t) == MINUS_EXPR)
                step = fold_build1 (NEGATE_EXPR, stept, step);
              OMP_CLAUSE_LINEAR_STEP (c) = step;
              if (step != TREE_OPERAND (t, 1))
                {
                  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
                                        &for_pre_body, NULL,
                                        is_gimple_val, fb_rvalue, false);
                  ret = MIN (ret, tret);
                }
            }
          gimplify_omp_ctxp->in_for_exprs = false;
          break;

        default:
          gcc_unreachable ();
        }

      if (c2)
        {
          gcc_assert (c);
          OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
        }

      if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
        {
          for (c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
            if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
                 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
                     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
                && OMP_CLAUSE_DECL (c) == decl)
              {
                if (is_doacross && (collapse == 1 || i >= collapse))
                  t = var;
                else
                  {
                    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
                    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
                    gcc_assert (TREE_OPERAND (t, 0) == var);
                    t = TREE_OPERAND (t, 1);
                    gcc_assert (TREE_CODE (t) == PLUS_EXPR
                                || TREE_CODE (t) == MINUS_EXPR
                                || TREE_CODE (t) == POINTER_PLUS_EXPR);
                    gcc_assert (TREE_OPERAND (t, 0) == var);
                    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
                                is_doacross ? var : decl,
                                TREE_OPERAND (t, 1));
                  }
                gimple_seq *seq;
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
                  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
                else
                  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
                push_gimplify_context ();
                gimplify_assign (decl, t, seq);
                gimple *bind = NULL;
                if (gimplify_ctxp->temps)
                  {
                    bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
                    *seq = NULL;
                    gimplify_seq_add_stmt (seq, bind);
                  }
                pop_gimplify_context (bind);
              }
        }
      if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
        for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
             j++)
          {
            t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
            gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
            if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
              TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
            t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
            gcc_assert (COMPARISON_CLASS_P (t));
            if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
              TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
          }
    }

  BITMAP_FREE (has_decl_expr);
  delete allocate_uids;
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      push_gimplify_context ();
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
        {
          OMP_FOR_BODY (orig_for_stmt)
            = build3 (BIND_EXPR, void_type_node, NULL,
                      OMP_FOR_BODY (orig_for_stmt), NULL);
          TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
        }
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
                                         &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      if (gimple_code (g) == GIMPLE_BIND)
        pop_gimplify_context (g);
      else
        pop_gimplify_context (NULL);
    }

  if (orig_for_stmt != for_stmt)
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
        t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
        decl = TREE_OPERAND (t, 0);
        struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
        if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
          gimplify_omp_ctxp = ctx->outer_context;
        var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
        gimplify_omp_ctxp = ctx;
        omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
        TREE_OPERAND (t, 0) = var;
        t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
        TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
        TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
        if (OMP_FOR_NON_RECTANGULAR (for_stmt))
          for (int j = i + 1;
               j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
            {
              t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
              gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
              if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
                {
                  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
                  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
                }
              t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
              gcc_assert (COMPARISON_CLASS_P (t));
              if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
                {
                  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
                  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
                }
            }
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
                               &OMP_FOR_CLAUSES (orig_for_stmt),
                               TREE_CODE (orig_for_stmt));

  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
    {
      gimplify_seq_add_seq (pre_p, for_pre_body);
      for_pre_body = NULL;
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
                               TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
                               for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
          || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
              && gimplify_omp_ctxp->outer_context
              && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
        gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
        gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
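  /* Editorial sketch, not from the GCC sources, of the nesting built
     below for #pragma omp taskloop:

         GIMPLE_OMP_FOR (kind taskloop)      <- outer: iteration counts,
           GIMPLE_OMP_TASK (taskloop_p)         grainsize/num_tasks etc.
             GIMPLE_OMP_FOR (kind taskloop)  <- inner: the actual loop
               loop body  */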
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      bitmap lastprivate_uids = NULL;
      if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
        {
          c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
          if (c)
            lastprivate_uids = BITMAP_ALLOC (NULL);
          for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
                                         OMP_CLAUSE_LASTPRIVATE))
            bitmap_set_bit (lastprivate_uids,
                            DECL_UID (OMP_CLAUSE_DECL (c)));
          c = *gfor_clauses_ptr;
        }
      for (; c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          /* These clauses are allowed on task, move them there.  */
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_DEFAULT:
          case OMP_CLAUSE_IF:
          case OMP_CLAUSE_UNTIED:
          case OMP_CLAUSE_FINAL:
          case OMP_CLAUSE_MERGEABLE:
          case OMP_CLAUSE_PRIORITY:
          case OMP_CLAUSE_REDUCTION:
          case OMP_CLAUSE_IN_REDUCTION:
            *gtask_clauses_ptr = c;
            gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            break;
          case OMP_CLAUSE_PRIVATE:
            if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
              {
                /* We want private on outer for and firstprivate
                   on task.  */
                *gtask_clauses_ptr
                  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                      OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
                lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
                                                    false);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
                *gforo_clauses_ptr = c;
                gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
              }
            else
              {
                *gtask_clauses_ptr = c;
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
              }
            break;
          /* These clauses go into outer taskloop clauses.  */
          case OMP_CLAUSE_GRAINSIZE:
          case OMP_CLAUSE_NUM_TASKS:
          case OMP_CLAUSE_NOGROUP:
            *gforo_clauses_ptr = c;
            gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            break;
          /* Collapse clause we duplicate on both taskloops.  */
          case OMP_CLAUSE_COLLAPSE:
            *gfor_clauses_ptr = c;
            gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            *gforo_clauses_ptr = copy_node (c);
            gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
            break;
          /* For lastprivate, keep the clause on inner taskloop, and add
             a shared clause on task.  If the same decl is also firstprivate,
             add also firstprivate clause on the inner taskloop.  */
          case OMP_CLAUSE_LASTPRIVATE:
            if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
              {
                /* For taskloop C++ lastprivate IVs, we want:
                   1) private on outer taskloop
                   2) firstprivate and shared on task
                   3) lastprivate on inner taskloop  */
                *gtask_clauses_ptr
                  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                      OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
                lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
                                                    false);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
                OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
                *gforo_clauses_ptr
                  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                      OMP_CLAUSE_PRIVATE);
                OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
                OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
                TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
                gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
              }
            *gfor_clauses_ptr = c;
            gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            *gtask_clauses_ptr
              = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
            OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
            if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
              OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
            gtask_clauses_ptr
              = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
            break;
          /* Allocate clause we duplicate on task and inner taskloop
             if the decl is lastprivate, otherwise just put on task.  */
          case OMP_CLAUSE_ALLOCATE:
            if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
                && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
              {
                /* Additionally, put firstprivate clause on task
                   for the allocator if it is not constant.  */
                *gtask_clauses_ptr
                  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                      OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*gtask_clauses_ptr)
                  = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
              }
            if (lastprivate_uids
                && bitmap_bit_p (lastprivate_uids,
                                 DECL_UID (OMP_CLAUSE_DECL (c))))
              {
                *gfor_clauses_ptr = c;
                gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
                *gtask_clauses_ptr = copy_node (c);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
              }
            else
              {
                *gtask_clauses_ptr = c;
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
              }
            break;
          default:
            gcc_unreachable ();
          }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      BITMAP_FREE (lastprivate_uids);
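      /* Editorial summary, not from the GCC sources, of the distribution
         above: shared/firstprivate/default/if/untied/final/mergeable/
         priority/(in_)reduction move to the task; grainsize/num_tasks/
         nogroup go to the outer taskloop; collapse is duplicated on both
         taskloops; lastprivate stays on the inner taskloop with a shared
         clause added on the task.  */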
      gimple_set_location (gfor, input_location);
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
                                 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_set_location (g, input_location);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
        = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
                                gimple_omp_for_collapse (gfor),
                                gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
        {
          tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
          tree v = create_tmp_var (type);
          gimple_omp_for_set_index (gforo, i, v);
          t = unshare_expr (gimple_omp_for_initial (gfor, i));
          gimple_omp_for_set_initial (gforo, i, t);
          gimple_omp_for_set_cond (gforo, i,
                                   gimple_omp_for_cond (gfor, i));
          t = unshare_expr (gimple_omp_for_final (gfor, i));
          gimple_omp_for_set_final (gforo, i, t);
          t = unshare_expr (gimple_omp_for_incr (gfor, i));
          gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
          TREE_OPERAND (t, 0) = v;
          gimple_omp_for_set_incr (gforo, i, t);
          t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (t) = v;
          OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
          gimple_omp_for_set_clauses (gforo, t);
          if (OMP_FOR_NON_RECTANGULAR (for_stmt))
            {
              tree *p1 = NULL, *p2 = NULL;
              t = gimple_omp_for_initial (gforo, i);
              if (TREE_CODE (t) == TREE_VEC)
                p1 = &TREE_VEC_ELT (t, 0);
              t = gimple_omp_for_final (gforo, i);
              if (TREE_CODE (t) == TREE_VEC)
                {
                  if (p1)
                    p2 = &TREE_VEC_ELT (t, 0);
                  else
                    p1 = &TREE_VEC_ELT (t, 0);
                }
              if (p1)
                {
                  int j;
                  for (j = 0; j < i; j++)
                    if (*p1 == gimple_omp_for_index (gfor, j))
                      {
                        *p1 = gimple_omp_for_index (gforo, j);
                        if (p2)
                          *p2 = *p1;
                        break;
                      }
                  gcc_assert (j < i);
                }
            }
        }
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);

  if (TREE_CODE (orig_for_stmt) == OMP_FOR)
    {
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      unsigned lastprivate_conditional = 0;
      while (ctx
             && (ctx->region_type == ORT_TARGET_DATA
                 || ctx->region_type == ORT_TASKGROUP))
        ctx = ctx->outer_context;
      if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
        for (tree c = gimple_omp_for_clauses (gfor);
             c; c = OMP_CLAUSE_CHAIN (c))
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
              && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
            ++lastprivate_conditional;
      if (lastprivate_conditional)
        {
          struct omp_for_data fd;
          omp_extract_for_data (gfor, &fd, NULL);
          tree type
            = build_array_type_nelts (unsigned_type_for (fd.iter_type),
                                      lastprivate_conditional);
          tree var = create_tmp_var_raw (type);
          tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
          OMP_CLAUSE_DECL (c) = var;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
          gimple_omp_for_set_clauses (gfor, c);
          omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
        }
    }
  else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
    {
      unsigned lastprivate_conditional = 0;
      for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
            && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
          ++lastprivate_conditional;
      if (lastprivate_conditional)
        {
          struct omp_for_data fd;
          omp_extract_for_data (gfor, &fd, NULL);
          tree type = unsigned_type_for (fd.iter_type);
          while (lastprivate_conditional--)
            {
              tree c = build_omp_clause (UNKNOWN_LOCATION,
                                         OMP_CLAUSE__CONDTEMP_);
              OMP_CLAUSE_DECL (c) = create_tmp_var (type);
              OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
              gimple_omp_for_set_clauses (gfor, c);
            }
        }
    }

  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Helper for gimplify_omp_loop, called through walk_tree.  */

static tree
note_no_context_vars (tree *tp, int *, void *data)
{
  if (VAR_P (*tp)
      && DECL_CONTEXT (*tp) == NULL_TREE
      && !is_global_var (*tp))
    {
      vec<tree> *d = (vec<tree> *) data;
      d->safe_push (*tp);
      DECL_CONTEXT (*tp) = current_function_decl;
    }
  return NULL_TREE;
}

/* Gimplify the gross structure of an OMP_LOOP statement.  */

static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
  if (*expr_p != for_stmt)
    return GS_OK;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd  */
        ;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
        kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
        kind = OMP_CLAUSE_BIND_PARALLEL;
      else
        {
          for (; octx; octx = octx->outer_context)
            {
              if ((octx->region_type & ORT_ACC) != 0
                  || octx->region_type == ORT_NONE
                  || octx->region_type == ORT_IMPLICIT_TARGET)
                continue;
              break;
            }
          if (octx == NULL && !in_omp_construct)
            error_at (EXPR_LOCATION (for_stmt),
                      "%<bind%> clause not specified on a %<loop%> "
                      "construct not nested inside another OpenMP construct");
        }
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
        break;
      case OMP_CLAUSE_BIND_PARALLEL:
        if (!flag_openmp) /* flag_openmp_simd  */
          {
            OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
            break;
          }
        for (; octx; octx = octx->outer_context)
          if (octx->region_type == ORT_SIMD
              && omp_find_clause (octx->clauses,
                                  OMP_CLAUSE_BIND) == NULL_TREE)
            {
              error_at (EXPR_LOCATION (for_stmt),
                        "%<bind(parallel)%> on a %<loop%> construct nested "
                        "inside %<simd%> construct");
              OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
              break;
            }
        kind = OMP_CLAUSE_BIND_PARALLEL;
        break;
      case OMP_CLAUSE_BIND_TEAMS:
        if (!flag_openmp) /* flag_openmp_simd  */
          {
            OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
            break;
          }
        if ((octx
             && octx->region_type != ORT_IMPLICIT_TARGET
             && octx->region_type != ORT_NONE
             && (octx->region_type & ORT_TEAMS) == 0)
            || in_omp_construct)
          {
            error_at (EXPR_LOCATION (for_stmt),
                      "%<bind(teams)%> on a %<loop%> region not strictly "
                      "nested inside of a %<teams%> region");
            OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
            break;
          }
        kind = OMP_CLAUSE_BIND_TEAMS;
        break;
      default:
        gcc_unreachable ();
      }

  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
        if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
          {
            error_at (OMP_CLAUSE_LOCATION (*pc),
                      "%<inscan%> %<reduction%> clause on "
                      "%qs construct", "loop");
            OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
          }
        if (OMP_CLAUSE_REDUCTION_TASK (*pc))
          {
            error_at (OMP_CLAUSE_LOCATION (*pc),
                      "invalid %<task%> reduction modifier on construct "
                      "other than %<parallel%>, %qs or %<sections%>",
                      lang_GNU_Fortran () ? "do" : "for");
            OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
          }
        pc = &OMP_CLAUSE_CHAIN (*pc);
        break;
      case OMP_CLAUSE_LASTPRIVATE:
        for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
          {
            tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
            gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
            if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
              break;
            if (OMP_FOR_ORIG_DECLS (for_stmt)
                && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
                                            i)) == TREE_LIST
                && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
                                               i)))
              {
                tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
                if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
                  break;
              }
          }
        if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
          {
            error_at (OMP_CLAUSE_LOCATION (*pc),
                      "%<lastprivate%> clause on a %<loop%> construct refers "
                      "to a variable %qD which is not the loop iterator",
                      OMP_CLAUSE_DECL (*pc));
            *pc = OMP_CLAUSE_CHAIN (*pc);
          }
        else
          pc = &OMP_CLAUSE_CHAIN (*pc);
        break;
      default:
        pc = &OMP_CLAUSE_CHAIN (*pc);
        break;
      }

  TREE_SET_CODE (for_stmt, OMP_SIMD);

  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
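  /* Editorial sketch, not from the GCC sources: having turned the loop
     construct into OMP_SIMD above, the passes below wrap it according to
     its binding:

         bind(thread)   ->  simd
         bind(parallel) ->  parallel for simd
         bind(teams)    ->  distribute parallel for simd  */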
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
        {
          tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
                              make_node (BLOCK));
          append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
          *expr_p = make_node (OMP_PARALLEL);
          TREE_TYPE (*expr_p) = void_type_node;
          OMP_PARALLEL_BODY (*expr_p) = bind;
          OMP_PARALLEL_COMBINED (*expr_p) = 1;
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
          tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
          for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
            if (OMP_FOR_ORIG_DECLS (for_stmt)
                && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
                                             i))
                    == TREE_LIST))
              {
                tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
                if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
                  {
                    *pc = build_omp_clause (UNKNOWN_LOCATION,
                                            OMP_CLAUSE_FIRSTPRIVATE);
                    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
                    pc = &OMP_CLAUSE_CHAIN (*pc);
                  }
              }
        }
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_BIND:
          case OMP_CLAUSE_ORDER:
          case OMP_CLAUSE_COLLAPSE:
            *pc = copy_node (c);
            pc = &OMP_CLAUSE_CHAIN (*pc);
            break;
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
            /* Only needed on innermost.  */
            break;
          case OMP_CLAUSE_LASTPRIVATE:
            if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
              {
                *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                        OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
                lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
                pc = &OMP_CLAUSE_CHAIN (*pc);
              }
            *pc = copy_node (c);
            OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
            TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
            if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
              {
                if (pass != last)
                  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
                else
                  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
                OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
              }
            pc = &OMP_CLAUSE_CHAIN (*pc);
            break;
          case OMP_CLAUSE_REDUCTION:
            *pc = copy_node (c);
            OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
            TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
            if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
              {
                auto_vec<tree> no_context_vars;
                int walk_subtrees = 0;
                note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
                                      &walk_subtrees, &no_context_vars);
                if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
                  note_no_context_vars (&p, &walk_subtrees,
                                        &no_context_vars);
                walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
                                              note_no_context_vars,
                                              &no_context_vars);
                walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
                                              note_no_context_vars,
                                              &no_context_vars);

                OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
                  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
                if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
                  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
                    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));

                hash_map<tree, tree> decl_map;
                decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
                decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
                              OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
                if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
                  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
                                OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));

                copy_body_data id;
                memset (&id, 0, sizeof (id));
                id.src_fn = current_function_decl;
                id.dst_fn = current_function_decl;
                id.src_cfun = cfun;
                id.decl_map = &decl_map;
                id.copy_decl = copy_decl_no_change;
                id.transform_call_graph_edges = CB_CGE_DUPLICATE;
                id.transform_new_cfg = true;
                id.transform_return_to_modify = false;
                walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
                           &id, NULL);
                walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
                           &id, NULL);

                for (tree d : no_context_vars)
                  {
                    DECL_CONTEXT (d) = NULL_TREE;
                    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
                  }
              }
            else
              {
                OMP_CLAUSE_REDUCTION_INIT (*pc)
                  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
                OMP_CLAUSE_REDUCTION_MERGE (*pc)
                  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
              }
            pc = &OMP_CLAUSE_CHAIN (*pc);
            break;
          default:
            gcc_unreachable ();
          }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
}

/* Helper function of optimize_target_teams, find OMP_TEAMS inside
   of OMP_TARGET's body.  */

static tree
find_omp_teams (tree *tp, int *walk_subtrees, void *)
{
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_TEAMS:
      return *tp;
    case STATEMENT_LIST:
      *walk_subtrees = 1;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      if (error_operand_p (*tp)
          || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
          || DECL_HAS_VALUE_EXPR_P (*tp)
          || DECL_THREAD_LOCAL_P (*tp)
          || TREE_SIDE_EFFECTS (*tp)
          || TREE_THIS_VOLATILE (*tp))
        return *tp;
      if (is_global_var (*tp)
          && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
              || lookup_attribute ("omp declare target link",
                                   DECL_ATTRIBUTES (*tp))))
        return *tp;
      if (VAR_P (*tp)
          && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
          && !is_global_var (*tp)
          && decl_function_context (*tp) == current_function_decl)
        return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
                             (splay_tree_key) *tp);
      if (n == NULL)
        {
          if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
            return NULL_TREE;
          return *tp;
        }
      else if (n->value & GOVD_LOCAL)
        return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
        return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
               == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
        return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
        return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (*tp)
          || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
        return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
                                      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
        return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
        return NULL_TREE;
      return *tp;
    }
}

/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
      the target construct.
   -2 means that no explicit teams construct was specified
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined.  */
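/* Editorial sketch, not from the GCC sources: for

       #pragma omp target firstprivate(n)
       #pragma omp teams num_teams(4) thread_limit(n)

   this adds num_teams(4) and a thread_limit clause for n to the
   OMP_TARGET, letting the runtime size the launch up front; an
   expression it cannot pre-evaluate on the host is recorded as -1.  */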
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams_lower = NULL_TREE;
  tree num_teams_upper = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    num_teams_upper = build_int_cst (integer_type_node, -2);
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
          {
            p = &num_teams_upper;
            num_teams_loc = OMP_CLAUSE_LOCATION (c);
            if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
              {
                expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
                if (TREE_CODE (expr) == INTEGER_CST)
                  num_teams_lower = expr;
                else if (walk_tree (&expr, computable_teams_clause,
                                    NULL, NULL))
                  num_teams_lower = integer_minus_one_node;
                else
                  {
                    num_teams_lower = expr;
                    gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
                    if (gimplify_expr (&num_teams_lower, pre_p, NULL,
                                       is_gimple_val, fb_rvalue, false)
                        == GS_ERROR)
                      {
                        gimplify_omp_ctxp = target_ctx;
                        num_teams_lower = integer_minus_one_node;
                      }
                    else
                      {
                        gimplify_omp_ctxp = target_ctx;
                        if (!DECL_P (expr)
                            && TREE_CODE (expr) != TARGET_EXPR)
                          OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
                            = num_teams_lower;
                      }
                  }
              }
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
          {
            p = &thread_limit;
            thread_limit_loc = OMP_CLAUSE_LOCATION (c);
          }
        else
          continue;
        expr = OMP_CLAUSE_OPERAND (c, 0);
        if (TREE_CODE (expr) == INTEGER_CST)
          {
            *p = expr;
            continue;
          }
        if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
          {
            *p = integer_minus_one_node;
            continue;
          }
        *p = expr;
        gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
        if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
            == GS_ERROR)
          {
            gimplify_omp_ctxp = target_ctx;
            *p = integer_minus_one_node;
            continue;
          }
        gimplify_omp_ctxp = target_ctx;
        if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
          OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
    {
      c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
      OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
      OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
      OMP_TARGET_CLAUSES (target) = c;
    }
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_SCOPE:
      ort = ORT_TASKGROUP;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      if (gimplify_omp_ctxp == NULL
          || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
        ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
                             TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
        pop_gimplify_context (g);
      else
        pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
        {
          enum built_in_function end_ix;
          switch (TREE_CODE (expr))
            {
            case OACC_DATA:
            case OACC_HOST_DATA:
              end_ix = BUILT_IN_GOACC_DATA_END;
              break;
            case OMP_TARGET_DATA:
              end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
              break;
            default:
              gcc_unreachable ();
            }
          tree fn = builtin_decl_explicit (end_ix);
          g = gimple_build_call (fn, 0);
          gimple_seq cleanup = NULL;
          gimple_seq_add_stmt (&cleanup, g);
          g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
          body = NULL;
          gimple_seq_add_stmt (&body, g);
        }
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
                               TREE_CODE (expr));
  in_omp_construct = save_in_omp_construct;

  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
                                      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
        {
          for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
              OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
        }

      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
                                      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
                                      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
                                      OMP_CLAUSES (expr));
      break;
    case OACC_SERIAL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
                                      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_SCOPE:
      stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
                                      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
         to be evaluated before the use_device_{ptr,addr} clauses if they
         refer to the same variables.  */
      {
        tree use_device_clauses;
        tree *pc, *uc = &use_device_clauses;
        for (pc = &OMP_CLAUSES (expr); *pc; )
          if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
              || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
            {
              *uc = *pc;
              *pc = OMP_CLAUSE_CHAIN (*pc);
              uc = &OMP_CLAUSE_CHAIN (*uc);
            }
          else
            pc = &OMP_CLAUSE_CHAIN (*pc);
        *uc = NULL_TREE;
        *pc = use_device_clauses;
        stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
                                        OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
        gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
      ort = ORT_ACC;
      break;
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
                             ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
                               TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
                          OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
         option.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c;
           c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
          switch (OMP_CLAUSE_MAP_KIND (c))
            {
            case GOMP_MAP_FORCE_TO:
              OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
              break;
            case GOMP_MAP_FORCE_FROM:
              OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
              break;
            default:
              break;
            }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
           && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
                               OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
         semantics.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c;
           c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
          switch (OMP_CLAUSE_MAP_KIND (c))
            {
            case GOMP_MAP_FROM:
              OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
              have_clause = true;
              break;
            case GOMP_MAP_RELEASE:
              OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
              have_clause = true;
              break;
            case GOMP_MAP_TO_PSET:
              /* Fortran arrays with descriptors must map that descriptor when
                 doing standalone "attach" operations (in OpenACC).  In that
                 case GOMP_MAP_TO_PSET appears by itself with no preceding
                 clause (see trans-openmp.cc:gfc_trans_omp_clauses).  */
              break;
            case GOMP_MAP_POINTER:
              /* TODO PR92929: we may see these here, but they'll always follow
                 one of the clauses above, and will be handled by libgomp as
                 one group, so no handling required here.  */
              gcc_assert (have_clause);
              break;
            case GOMP_MAP_DETACH:
              OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
              have_clause = false;
              break;
            case GOMP_MAP_STRUCT:
            case GOMP_MAP_STRUCT_UNORD:
              have_clause = false;
              break;
            default:
              gcc_unreachable ();
            }
    }
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}

/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */
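/* Editorial sketch, not from the GCC sources: for

       #pragma omp atomic
       x = x + 1;

   the front end hands the lhs to us as *&x; this predicate then lets the
   rhs walker recognize the x in x + 1 (possibly behind casts to type
   variants) as the very location being updated atomically.  */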
static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (INDIRECT_REF_P (expr))
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
             && (CONVERT_EXPR_P (expr)
                 || TREE_CODE (expr) == NON_LVALUE_EXPR)
             && TREE_CODE (expr) == TREE_CODE (addr)
             && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
        {
          expr = TREE_OPERAND (expr, 0);
          addr = TREE_OPERAND (addr, 0);
        }
      if (expr == addr)
        return true;
      return (TREE_CODE (addr) == ADDR_EXPR
              && TREE_CODE (expr) == ADDR_EXPR
              && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */
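/* Editorial sketch, not from the GCC sources: given

       #pragma omp atomic
       x = x + foo ();

   foo () does not involve the lhs, so it is gimplified into a temporary
   before the atomic load/store pair, while occurrences of x are replaced
   by LHS_VAR, the register holding the atomically loaded value.  */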
17482 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
17483 tree lhs_var
, tree
&target_expr
, bool rhs
, int depth
)
17485 tree expr
= *expr_p
;
17488 if (goa_lhs_expr_p (expr
, lhs_addr
))
17494 if (is_gimple_val (expr
))
17497 /* Maximum depth of lhs in expression is for the
17498 __builtin_clear_padding (...), __builtin_clear_padding (...),
17499 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
17503 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
17506 case tcc_comparison
:
17507 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
17508 lhs_var
, target_expr
, true, depth
);
17511 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
17512 lhs_var
, target_expr
, true, depth
);
17514 case tcc_expression
:
17515 switch (TREE_CODE (expr
))
17517 case TRUTH_ANDIF_EXPR
:
17518 case TRUTH_ORIF_EXPR
:
17519 case TRUTH_AND_EXPR
:
17520 case TRUTH_OR_EXPR
:
17521 case TRUTH_XOR_EXPR
:
17522 case BIT_INSERT_EXPR
:
17523 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
17524 lhs_addr
, lhs_var
, target_expr
, true,
17527 case TRUTH_NOT_EXPR
:
17528 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17529 lhs_addr
, lhs_var
, target_expr
, true,
17533 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
17534 target_expr
, true, depth
))
17536 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
17537 lhs_addr
, lhs_var
, target_expr
, true,
17539 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17540 lhs_addr
, lhs_var
, target_expr
, false,
17545 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
17546 target_expr
, true, depth
))
17548 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17549 lhs_addr
, lhs_var
, target_expr
, false,
17552 case COMPOUND_EXPR
:
17553 /* Break out any preevaluations from cp_build_modify_expr. */
17554 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
17555 expr
= TREE_OPERAND (expr
, 1))
17557 /* Special-case __builtin_clear_padding call before
17558 __builtin_memcmp. */
17559 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
)
17561 tree fndecl
= get_callee_fndecl (TREE_OPERAND (expr
, 0));
17563 && fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
)
17564 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
17566 || goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
,
17568 target_expr
, true, depth
)))
17572 saw_lhs
= goa_stabilize_expr (&TREE_OPERAND (expr
, 0),
17573 pre_p
, lhs_addr
, lhs_var
,
17574 target_expr
, true, depth
);
17575 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1),
17576 pre_p
, lhs_addr
, lhs_var
,
17577 target_expr
, rhs
, depth
);
17583 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
17586 return goa_stabilize_expr (&expr
, pre_p
, lhs_addr
, lhs_var
,
17587 target_expr
, rhs
, depth
);
17589 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
,
17590 target_expr
, rhs
, depth
);
17592 if (!goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
, lhs_addr
,
17593 lhs_var
, target_expr
, true, depth
))
17595 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17596 lhs_addr
, lhs_var
, target_expr
, true,
17598 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
17599 lhs_addr
, lhs_var
, target_expr
, true,
17601 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 2), pre_p
,
17602 lhs_addr
, lhs_var
, target_expr
, true,
17606 if (TARGET_EXPR_INITIAL (expr
))
17608 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
,
17609 lhs_var
, target_expr
, true,
17612 if (expr
== target_expr
)
17616 saw_lhs
= goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr
),
17617 pre_p
, lhs_addr
, lhs_var
,
17618 target_expr
, true, depth
);
17619 if (saw_lhs
&& target_expr
== NULL_TREE
&& pre_p
)
17620 target_expr
= expr
;
17628 case tcc_reference
:
17629 if (TREE_CODE (expr
) == BIT_FIELD_REF
17630 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
17631 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17632 lhs_addr
, lhs_var
, target_expr
, true,
17636 if (TREE_CODE (expr
) == CALL_EXPR
)
17638 if (tree fndecl
= get_callee_fndecl (expr
))
17639 if (fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
,
17642 int nargs
= call_expr_nargs (expr
);
17643 for (int i
= 0; i
< nargs
; i
++)
17644 saw_lhs
|= goa_stabilize_expr (&CALL_EXPR_ARG (expr
, i
),
17645 pre_p
, lhs_addr
, lhs_var
,
17646 target_expr
, true, depth
);
17655 if (saw_lhs
== 0 && pre_p
)
17657 enum gimplify_status gs
;
17658 if (TREE_CODE (expr
) == CALL_EXPR
&& VOID_TYPE_P (TREE_TYPE (expr
)))
17660 gimplify_stmt (&expr
, pre_p
);
17664 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
17666 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
);
17667 if (gs
!= GS_ALL_DONE
)
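
/* Editorial sketch, not part of the pass: a minimal caller of
   goa_stabilize_expr in the style of gimplify_omp_atomic below.  The
   wrapper name is hypothetical; the API it calls is the one defined
   above.  Kept under #if 0 on purpose.  */
#if 0
static bool
stabilize_atomic_rhs_sketch (tree *rhs_p, gimple_seq *pre_p,
			     tree addr, tree tmp_load)
{
  tree target_expr = NULL_TREE;
  /* Replace occurrences of the lhs (identified via ADDR) in *RHS_P with
     TMP_LOAD and force unrelated subexpressions into PRE_P; a negative
     result means a subexpression failed to gimplify.  */
  return goa_stabilize_expr (rhs_p, pre_p, addr, tmp_load, target_expr,
			     true, 0) >= 0;
}
#endif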
/* Gimplify an OMP_ATOMIC statement.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;
  tree target_expr = NULL_TREE;

  tmp_load = create_tmp_reg (type);
  if (rhs
      && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
			     true, 0) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      tree rhsarg = rhs;
      if (TREE_CODE (rhs) == COND_EXPR)
	rhsarg = TREE_OPERAND (rhs, 1);
      if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhsarg, 2);
	  tree op1 = TREE_OPERAND (rhsarg, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
	  tree t = build2_loc (EXPR_LOCATION (rhsarg),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhsarg),
					   BIT_FIELD_REF, TREE_TYPE (op1),
					   tmp_store, bitsize, bitpos), op1);
	  if (TREE_CODE (rhs) == COND_EXPR)
	    t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
			    TREE_OPERAND (rhs, 0), t, void_node);
	  gimplify_and_add (t, pre_p);
	  *expr_p = tmp_store;
	  return GS_ALL_DONE;
	}

      bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
      if (TREE_CODE (rhs) == COND_EXPR)
	gimplify_ctxp->allow_rhs_cond_expr = true;
      enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
					       is_gimple_val, fb_rvalue);
      gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
      if (gs != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
    {
      gimple_omp_atomic_set_weak (loadstmt);
      gimple_omp_atomic_set_weak (storestmt);
    }
  gimplify_seq_add_stmt (pre_p, storestmt);
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
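
/* Editorial example of the lowering implemented above (an assumption
   about the usual shape, kept out of the build): an atomic capture is
   split into a GIMPLE_OMP_ATOMIC_LOAD into a fresh temporary, a
   gimplified rhs, and a GIMPLE_OMP_ATOMIC_STORE whose "need value"
   flag makes the stored value available for V.  */
#if 0
void
atomic_capture_example (int *v, int *x)
{
#pragma omp atomic capture
  *v = *x += 1;
  /* Roughly:  tmp = ATOMIC_LOAD (x);  t1 = tmp + 1;
	       ATOMIC_STORE (t1);      *v = t1;  */
}
#endif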
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
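
/* Editorial example: the kind of GENERIC gimplify_transaction handles.
   The body below becomes a GIMPLE_TRANSACTION with GTMA_IS_RELAXED set.
   Illustration only, kept out of the build.  */
#if 0
int tm_counter;
void
transaction_example (void)
{
  __transaction_relaxed { tm_counter++; }
}
#endif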
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ())
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %qs clause must be "
		      "closely nested inside a loop with %<ordered%> clause",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	  {
	    bool fail = false;
	    sink_c = c;
	    if (OMP_CLAUSE_DECL (c) == NULL_TREE)
	      continue;  /* omp_cur_iteration - 1 */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %qs clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables",
			  OMP_CLAUSE_DOACROSS_DEPEND (c)
			  ? "depend" : "doacross");
		failures++;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
	  {
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %qs clause with %<source%> "
			  "modifier on an %<ordered%> construct",
			  OMP_CLAUSE_DOACROSS_DEPEND (source_c)
			  ? "depend" : "doacross");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%qs clause with %<source%> modifier specified "
		"together with %qs clauses with %<sink%> modifier "
		"on the same construct",
		OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
		OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
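
/* Editorial example of the doacross checking above: the sink iteration
   variable must match the iteration variable of the outermost ordered
   loop recorded in loop_iter_var.  Illustration only, kept out of the
   build.  */
#if 0
void
doacross_example (int n, int a[])
{
#pragma omp for ordered(1)
  for (int i = 1; i < n; i++)
    {
#pragma omp ordered depend(sink: i - 1)
      a[i] += a[i - 1];
#pragma omp ordered depend(source)
    }
}
#endif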
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
   the expression needs to evaluate to a value to be used as
   an operand in a GIMPLE statement, this value will be stored in
   *EXPR_P on exit.  This happens when the caller specifies one
   of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of EXPR and all the side-effects that must
   be executed before the main expression.  On exit, the last
   statement of PRE_P is the core statement being gimplified.  For
   instance, when gimplifying 'if (++a)' the last statement in
   PRE_P will be 'if (t.1)' where t.1 is the result of
   pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of all the side-effects that must be executed
   after the main expression.  If this is NULL, the post
   side-effects are stored at the end of PRE_P.

   The reason why the output is split in two is to handle post
   side-effects explicitly.  In some cases, an expression may have
   inner and outer post side-effects which need to be emitted in
   an order different from the one given by the recursive
   traversal.  For instance, for the expression (*p--)++ the post
   side-effects of '--' must actually occur *after* the post
   side-effects of '++'.  However, gimplification will first visit
   the inner expression, so if a separate POST sequence was not
   used, the resulting sequence would be:

	1	t.1 = *p
	2	p = p - 1
	3	t.2 = t.1 + 1
	4	*p = t.2

   However, the post-decrement operation in line #2 must not be
   evaluated until after the store to *p at line #4, so the
   correct sequence should be:

	1	t.1 = *p
	2	t.2 = t.1 + 1
	3	*p = t.2
	4	p = p - 1

   So, by specifying a separate post queue, it is possible
   to emit the post side-effects in the correct order.
   If POST_P is NULL, an internal queue will be used.  Before
   returning to the caller, the sequence POST_P is appended to
   the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
   returns nonzero if T is in the GIMPLE form requested by the
   caller.  The GIMPLE predicates are in gimple.cc.

   FALLBACK tells the function what sort of a temporary we want if
   gimplification cannot produce an expression that complies with
   GIMPLE_TEST_F.

   fb_none means that no temporary should be generated
   fb_rvalue means that an rvalue is OK to generate
   fb_lvalue means that an lvalue is OK to generate
   fb_either means that either is OK, but an lvalue is preferable.
   fb_mayfail means that gimplification may fail (in which case
   GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */
enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
  tree label;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
	   || gimple_test_f == is_gimple_call_addr
	   || gimple_test_f == is_gimple_condexpr_for_cond
	   || gimple_test_f == is_gimple_mem_rhs
	   || gimple_test_f == is_gimple_mem_rhs_or_call
	   || gimple_test_f == is_gimple_reg_rhs
	   || gimple_test_f == is_gimple_reg_rhs_or_call
	   || gimple_test_f == is_gimple_asm_val
	   || gimple_test_f == is_gimple_mem_ref_addr)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }

  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as the expression itself.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  */
      if (error_operand_p (save_expr))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  */
      ret = ((enum gimplify_status)
	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;

      /* Make sure that all the cases set 'ret' appropriately.  */
      ret = GS_UNHANDLED;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none,
					TREE_TYPE (*expr_p));
	  break;

	case VIEW_CONVERT_EXPR:
	  if ((fallback & fb_rvalue)
	      && is_gimple_reg_type (TREE_TYPE (*expr_p))
	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      recalculate_side_effects (*expr_p);
	      break;
	    }
	  /* Fallthru.  */

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();

	case OMP_ARRAY_SECTION:
	  gcc_unreachable ();

	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case COMPOUND_LITERAL_EXPR:
	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
						gimple_test_f, fallback);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  {
	    /* Preserve the original type of the expression and the
	       source location of the outer expression.  */
	    tree org_type = TREE_TYPE (*expr_p);
	    *expr_p = gimple_boolify (*expr_p);
	    *expr_p = build3_loc (input_location, COND_EXPR,
				  org_type, *expr_p,
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_true_node),
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_false_node));
	    ret = GS_OK;
	    break;
	  }

	case TRUTH_NOT_EXPR:
	  {
	    tree type = TREE_TYPE (*expr_p);
	    /* The parsers are careful to generate TRUTH_NOT_EXPR
	       only with operands that are always zero or one.
	       We do not fold here but handle the only interesting case
	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
	    *expr_p = gimple_boolify (*expr_p);
	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0));
	    else
	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0),
				    build_int_cst (TREE_TYPE (*expr_p), 1));
	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
	    ret = GS_OK;
	    break;
	  }

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;

	case ANNOTATE_EXPR:
	  {
	    tree cond = TREE_OPERAND (*expr_p, 0);
	    tree kind = TREE_OPERAND (*expr_p, 1);
	    tree data = TREE_OPERAND (*expr_p, 2);
	    tree type = TREE_TYPE (cond);
	    if (!INTEGRAL_TYPE_P (type))
	      {
		*expr_p = cond;
		ret = GS_OK;
		break;
	      }
	    tree tmp = create_tmp_var (type);
	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
	    gcall *call
	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
	    gimple_call_set_lhs (call, tmp);
	    gimplify_seq_add_stmt (pre_p, call);
	    *expr_p = tmp;
	    ret = GS_ALL_DONE;
	    break;
	  }

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  {
	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));

	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
	    if (*expr_p != save_expr)
	      {
		ret = GS_OK;
		break;
	      }

	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				 is_gimple_reg, fb_rvalue);
	    if (ret == GS_ERROR)
	      break;

	    recalculate_side_effects (*expr_p);
	    *expr_p = fold_build2_loc (input_location, MEM_REF,
				       TREE_TYPE (*expr_p),
				       TREE_OPERAND (*expr_p, 0),
				       build_int_cst (saved_ptr_type, 0));
	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
	    TREE_THIS_NOTRAP (*expr_p) = notrap;
	    ret = GS_OK;
	    break;
	  }

	/* We arrive here through the various re-gimplification paths.  */
	case MEM_REF:
	  /* First try re-folding the whole thing.  */
	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
			     TREE_OPERAND (*expr_p, 0),
			     TREE_OPERAND (*expr_p, 1));
	  if (tmp)
	    {
	      REF_REVERSE_STORAGE_ORDER (tmp)
		= REF_REVERSE_STORAGE_ORDER (*expr_p);
	      *expr_p = tmp;
	      recalculate_side_effects (*expr_p);
	      ret = GS_OK;
	      break;
	    }
	  /* Avoid re-gimplifying the address operand if it is already
	     in suitable form.  Re-gimplifying would mark the address
	     operand addressable.  Always gimplify when not in SSA form
	     as we still may have to gimplify decls with value-exprs.  */
	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				   is_gimple_mem_ref_addr, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  recalculate_side_effects (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	  /* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  /* Drop the overflow flag on constants, we do not want
	     that in the GIMPLE IL.  */
	  if (TREE_OVERFLOW_P (*expr_p))
	    *expr_p = drop_tree_overflow (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     DECL_INITIAL.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    {
	      *expr_p = DECL_INITIAL (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;

	case GOTO_EXPR:
	  /* If the target is not LABEL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
		gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
				      PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = gimplify_label_expr (expr_p, pre_p);
	  label = LABEL_EXPR_LABEL (*expr_p);
	  gcc_assert (decl_function_context (label) == current_function_decl);

	  /* If the label is used in a goto statement, or address of the label
	     is taken, we need to unpoison all variables that were seen so far.
	     Doing so prevents us from reporting false positives.  */
	  if (asan_poisoned_variables
	      && asan_used_labels != NULL
	      && asan_used_labels->contains (label)
	      && !gimplify_omp_ctxp)
	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);

	  if (gimplify_ctxp->live_switch_vars)
	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
				   pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  But if we're just elaborating this for side effects, just
	     gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      tree val;
	      tree temp = NULL_TREE;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
		if (TREE_SIDE_EFFECTS (val))
		  append_to_statement_list (val, &temp);

	      *expr_p = temp;
	      ret = temp ? GS_OK : GS_ALL_DONE;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;

	  /* The following are special cases that are not handled by the
	     original GIMPLE grammar.  */

	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	     eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_lvalue, fb_either);
	  recalculate_side_effects (*expr_p);
	  break;

	case TARGET_MEM_REF:
	  {
	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

	    if (TMR_BASE (*expr_p))
	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
				  post_p, is_gimple_mem_ref_addr, fb_either);
	    if (TMR_INDEX (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    if (TMR_INDEX2 (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
	    ret = MIN (r0, r1);
	  }
	  break;

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();

	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;

	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gtry *try_;

	    /* Calls to destructors are generated automatically in FINALLY/CATCH
	       block.  They should have location as UNKNOWN_LOCATION.  However,
	       gimplify_call_expr will reset these call stmts to input_location
	       if it finds stmt's location is unknown.  To prevent resetting for
	       destructors, we set the input_location to unknown.
	       Note that this only affects the destructor calls in FINALLY/CATCH
	       block, and will automatically reset to its original value by the
	       end of gimplify_expr.  */
	    input_location = UNKNOWN_LOCATION;
	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    bool save_in_handler_expr = gimplify_ctxp->in_handler_expr;
	    if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
		&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
	      {
		gimple_seq n = NULL, e = NULL;
		gimplify_ctxp->in_handler_expr = true;
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						0), &n);
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						1), &e);
		if (!gimple_seq_empty_p (n) || !gimple_seq_empty_p (e))
		  {
		    geh_else *stmt = gimple_build_eh_else (n, e);
		    gimple_seq_add_stmt (&cleanup, stmt);
		  }
	      }
	    else
	      {
		gimplify_ctxp->in_handler_expr = true;
		gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	      }
	    gimplify_ctxp->in_handler_expr = save_in_handler_expr;
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (EXPR_HAS_LOCATION (save_expr))
	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
	      gimple_set_location (try_, saved_location);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple *c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case EH_FILTER_EXPR:
	  {
	    gimple *ehf;
	    gimple_seq failure = NULL;

	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
	    copy_warning (ehf, *expr_p);
	    gimplify_seq_add_stmt (pre_p, ehf);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OBJ_TYPE_REF:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    TREE_SIDE_EFFECTS (*expr_p) = 0;
	    ret = MIN (r0, r1);
	  }
	  break;

	case LABEL_DECL:
	  /* We get here when taking the address of a label.  We mark
	     the label as "forced"; meaning it can never be removed and
	     it is a potential target for any computed goto.  */
	  FORCED_LABEL (*expr_p) = 1;
	  ret = GS_ALL_DONE;
	  break;

	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  /* Handlers can refer to the function result; if that has been
	     moved, we need to track it.  */
	  if (gimplify_ctxp->in_handler_expr && gimplify_ctxp->return_temp)
	    *expr_p = gimplify_ctxp->return_temp;
	  ret = GS_ALL_DONE;
	  break;

	case DEBUG_EXPR_DECL:
	  gcc_unreachable ();

	case DEBUG_BEGIN_STMT:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_debug_begin_stmt
				 (TREE_BLOCK (*expr_p),
				  EXPR_LOCATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  *expr_p = NULL;
	  break;

	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SIMD:
	  {
	    /* Temporarily disable into_ssa, as scan_omp_simd
	       which calls copy_gimple_seq_and_replace_locals can't deal
	       with SSA_NAMEs defined outside of the body properly.  */
	    bool saved_into_ssa = gimplify_ctxp->into_ssa;
	    gimplify_ctxp->into_ssa = false;
	    ret = gimplify_omp_for (expr_p, pre_p);
	    gimplify_ctxp->into_ssa = saved_into_ssa;
	    break;
	  }

	case OMP_FOR:
	case OMP_DISTRIBUTE:
	case OMP_TASKLOOP:
	case OACC_LOOP:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OMP_LOOP:
	  ret = gimplify_omp_loop (expr_p, pre_p);
	  break;

	case OACC_CACHE:
	  gimplify_oacc_cache (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DECLARE:
	  gimplify_oacc_declare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_HOST_DATA:
	case OACC_DATA:
	case OACC_KERNELS:
	case OACC_PARALLEL:
	case OACC_SERIAL:
	case OMP_SCOPE:
	case OMP_SECTIONS:
	case OMP_SINGLE:
	case OMP_TARGET:
	case OMP_TARGET_DATA:
	case OMP_TEAMS:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_ENTER_DATA:
	case OACC_EXIT_DATA:
	case OACC_UPDATE:
	case OMP_TARGET_UPDATE:
	case OMP_TARGET_ENTER_DATA:
	case OMP_TARGET_EXIT_DATA:
	  gimplify_omp_target_update (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_STRUCTURED_BLOCK:
	case OMP_MASTER:
	case OMP_MASKED:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	case OMP_SCAN:
	  {
	    gimple_seq body = NULL;
	    gimple *g;
	    bool saved_in_omp_construct = in_omp_construct;

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_STRUCTURED_BLOCK:
		g = gimple_build_omp_structured_block (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_ORDERED:
		g = gimplify_omp_ordered (*expr_p, body);
		if (OMP_BODY (*expr_p) == NULL_TREE
		    && gimple_code (g) == GIMPLE_OMP_ORDERED)
		  gimple_omp_ordered_standalone (g);
		break;
	      case OMP_MASKED:
		gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_MASKED);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_MASKED_CLAUSES (*expr_p),
					     OMP_MASKED);
		g = gimple_build_omp_masked (body,
					     OMP_MASKED_CLAUSES (*expr_p));
		break;
	      case OMP_CRITICAL:
		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_CRITICAL_CLAUSES (*expr_p),
					     OMP_CRITICAL);
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p),
					       OMP_CRITICAL_CLAUSES (*expr_p));
		break;
	      case OMP_SCAN:
		gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_SCAN);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_SCAN_CLAUSES (*expr_p),
					     OMP_SCAN);
		g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_TASKGROUP:
	  {
	    gimple_seq body = NULL;

	    tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
	    bool saved_in_omp_construct = in_omp_construct;
	    gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
				       OMP_TASKGROUP);
	    gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    gimple_seq cleanup = NULL;
	    tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
	    gimple *g = gimple_build_call (fn, 0);
	    gimple_seq_add_stmt (&cleanup, g);
	    g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	    body = NULL;
	    gimple_seq_add_stmt (&body, g);
	    g = gimple_build_omp_taskgroup (body, *pclauses);
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_ATOMIC:
	case OMP_ATOMIC_READ:
	case OMP_ATOMIC_CAPTURE_OLD:
	case OMP_ATOMIC_CAPTURE_NEW:
	  ret = gimplify_omp_atomic (expr_p, pre_p);
	  break;

	case TRANSACTION_EXPR:
	  ret = gimplify_transaction (expr_p, pre_p);
	  break;

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  {
	    tree orig_type = TREE_TYPE (*expr_p);
	    tree new_type, xop0, xop1;
	    *expr_p = gimple_boolify (*expr_p);
	    new_type = TREE_TYPE (*expr_p);
	    if (!useless_type_conversion_p (orig_type, new_type))
	      {
		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
		ret = GS_OK;
		break;
	      }

	    /* Boolified binary truth expressions are semantically equivalent
	       to bitwise binary expressions.  Canonicalize them to the
	       bitwise variant.  */
	    switch (TREE_CODE (*expr_p))
	      {
	      case TRUTH_AND_EXPR:
		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
		break;
	      case TRUTH_OR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
		break;
	      case TRUTH_XOR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
		break;
	      default:
		break;
	      }
	    /* Now make sure that operands have compatible type to
	       expression's new_type.  */
	    xop0 = TREE_OPERAND (*expr_p, 0);
	    xop1 = TREE_OPERAND (*expr_p, 1);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
							    new_type,
							    xop0);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
							    new_type,
							    xop1);
	    /* Continue classified as tcc_binary.  */
	    goto expr_2;
	  }

	case VEC_COND_EXPR:
	  goto expr_3;

	case VEC_PERM_EXPR:
	  /* Classified as tcc_expression.  */
	  goto expr_3;

	case BIT_INSERT_EXPR:
	  /* Argument 3 is a constant.  */
	  goto expr_2;

	case POINTER_PLUS_EXPR:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    recalculate_side_effects (*expr_p);
	    ret = MIN (r0, r1);
	    break;
	  }

	default:
	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
	    {
	    case tcc_comparison:
	      /* Handle comparison of objects of non scalar mode aggregates
		 with a call to memcmp.  It would be nice to only have to do
		 this for variable-sized objects, but then we'd have to allow
		 the same nest of reference nodes we allow for MODIFY_EXPR and
		 that's too complex.

		 Compare scalar mode aggregates as scalar mode values.  Using
		 memcmp for them would be very inefficient at best, and is
		 plain wrong if bitfields are involved.  */
	      if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
		ret = GS_ERROR;
	      else
		{
		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

		  /* Vector comparisons need no boolification.  */
		  if (TREE_CODE (type) == VECTOR_TYPE)
		    goto expr_2;
		  else if (!AGGREGATE_TYPE_P (type))
		    {
		      tree org_type = TREE_TYPE (*expr_p);
		      *expr_p = gimple_boolify (*expr_p);
		      if (!useless_type_conversion_p (org_type,
						      TREE_TYPE (*expr_p)))
			{
			  *expr_p = fold_convert_loc (input_location,
						      org_type, *expr_p);
			  ret = GS_OK;
			}
		      else
			goto expr_2;
		    }
		  else if (SCALAR_INT_MODE_P (TYPE_MODE (type)))
		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
		  else
		    ret = gimplify_variable_sized_compare (expr_p);
		}
	      break;

	    /* If *EXPR_P does not need to be special-cased, handle it
	       according to its class.  */
	    case tcc_unary:
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      break;

	    case tcc_binary:
	    expr_2:
	      {
		enum gimplify_status r0, r1;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (r0, r1);
		break;
	      }

	    expr_3:
	      {
		enum gimplify_status r0, r1, r2;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (MIN (r0, r1), r2);
		break;
	      }

	    case tcc_declaration:
	    case tcc_constant:
	      ret = GS_ALL_DONE;
	      goto dont_recalculate;

	    default:
	      gcc_unreachable ();
	    }

	  recalculate_side_effects (*expr_p);

	dont_recalculate:
	  break;
	}

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.
	 We can also get here with code such as "*&&L;", where L is
	 a LABEL_DECL that is marked as FORCED_LABEL.  */
      if (TREE_CODE (*expr_p) == LABEL_DECL
	  || !TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	      /* Anything else with side-effects must be converted to
		 a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
	       && !is_empty_type (TREE_TYPE (*expr_p)))
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
	 it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != EH_ELSE_EXPR
		  && code != OACC_PARALLEL
		  && code != OACC_KERNELS
		  && code != OACC_SERIAL
		  && code != OACC_DATA
		  && code != OACC_HOST_DATA
		  && code != OACC_DECLARE
		  && code != OACC_UPDATE
		  && code != OACC_ENTER_DATA
		  && code != OACC_EXIT_DATA
		  && code != OACC_CACHE
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OACC_LOOP
		  && code != OMP_MASTER
		  && code != OMP_MASKED
		  && code != OMP_TASKGROUP
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SCAN
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_STRUCTURED_BLOCK
		  && code != OMP_SINGLE
		  && code != OMP_SCOPE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tree ref_alias_type = reference_alias_ptr_type (*expr_p);
      unsigned int ref_align = get_object_alignment (*expr_p);
      tree ref_type = TREE_TYPE (*expr_p);
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      if (TYPE_ALIGN (ref_type) != ref_align)
	ref_type = build_aligned_type (ref_type, ref_align);
      *expr_p = build2 (MEM_REF, ref_type,
			tmp, build_zero_cst (ref_alias_type));
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}
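
/* Editorial sketch of the calling convention documented above: request a
   GIMPLE value, allowing an rvalue temporary as fallback.  The wrapper
   itself is hypothetical; kept out of the build.  */
#if 0
static enum gimplify_status
force_gimple_val_sketch (tree *op_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  /* Per the consistency checks above, is_gimple_val requires fb_rvalue.  */
  return gimplify_expr (op_p, pre_p, post_p, is_gimple_val, fb_rvalue);
}
#endif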
/* Like gimplify_expr but make sure the gimplified result is not itself
   a SSA name (but a decl if it were).  Temporaries required by
   evaluating *EXPR_P may be still SSA names.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback,
	       bool allow_ssa)
{
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
					    gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);

  return ret;
}
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!ignored_p
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    /* Likewise, ensure variable offsets aren't removed.  */
	    if (!ignored_p
		&& (t = DECL_FIELD_OFFSET (field))
		&& VAR_P (t)
		&& DECL_ARTIFICIAL (t))
	      DECL_IGNORED_P (t) = 0;
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
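
/* Editorial example: a type whose sizes the function above has to
   evaluate.  For the VLA below, TYPE_SIZE and the domain bound contain
   SAVE_EXPRs of n that are evaluated exactly once into the statement
   list.  Illustration only, kept out of the build.  */
#if 0
void
vla_example (int n)
{
  char buf[n];	/* Sizes of char[n] are gimplified once, then reused.  */
  buf[0] = 0;
}
#endif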
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || VAR_P (expr)
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    gsi_remove (&gsi, false);
	  else
	    {
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}
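
/* Editorial note on the matching above, as a sketch: the test is a plain
   substring match, so a function-list entry "foo" also excludes "foobar"
   and "my_foo".  The helper name is hypothetical; kept out of the
   build.  */
#if 0
static bool
exclusion_matches_sketch (const char *name, const char *pattern)
{
  return strstr (name, pattern) != NULL;
}
#endif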
19593 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
19594 If COND_VAR is not NULL, it is a boolean variable guarding the call to
19595 the instrumentation function. IF STMT is not NULL, it is a statement
19596 to be executed just before the call to the instrumentation function. */
19599 build_instrumentation_call (gimple_seq
*seq
, enum built_in_function fncode
,
19600 tree cond_var
, gimple
*stmt
)
19602 /* The instrumentation hooks aren't going to call the instrumented
19603 function and the address they receive is expected to be matchable
19604 against symbol addresses. Make sure we don't create a trampoline,
19605 in case the current function is nested. */
19606 tree this_fn_addr
= build_fold_addr_expr (current_function_decl
);
19607 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;

  tree label_true, label_false;
  if (cond_var)
    {
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      label_false = create_artificial_label (UNKNOWN_LOCATION);
      gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
				       label_true, label_false);
      gimplify_seq_add_stmt (seq, cond);
      gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
      gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
							NOT_TAKEN));
    }

  if (stmt)
    gimplify_seq_add_stmt (seq, stmt);

  tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  gcall *call = gimple_build_call (x, 1, integer_zero_node);
  tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
  gimple_call_set_lhs (call, tmp_var);
  gimplify_seq_add_stmt (seq, call);
  x = builtin_decl_implicit (fncode);
  call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
  gimplify_seq_add_stmt (seq, call);

  if (cond_var)
    gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
}
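
/* For illustration only: with a non-NULL COND_VAR, the sequence built above
   has roughly this shape (label names are made up):

       if (COND_VAR == 0) goto do_call; else goto skip;
     do_call:
       [STMT]
       return_addr = __builtin_return_address (0);
       __cyg_profile_func_enter (this_fn_addr, return_addr);
     skip:

   with the do_call path predicted cold, and __cyg_profile_func_exit called
   instead when FNCODE is BUILT_IN_PROFILE_FUNC_EXIT.  */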

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting sequence of GIMPLE statements corresponding to the body
   is installed as the GIMPLE body of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  reset_cond_uid ();

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      gimple_seq body = NULL, cleanup = NULL;
      gassign *assign;
      tree cond_var;

      /* If -finstrument-functions-once is specified, generate:

	   static volatile bool C.0 = false;
	   bool tmp_called;

	   tmp_called = C.0;
	   if (!tmp_called)
	     {
	       C.0 = true;
	       [call profiling enter function]
	     }

	 without specific protection for data races.  */
      if (flag_instrument_function_entry_exit > 1)
	{
	  tree first_var
	    = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			  VAR_DECL,
			  create_tmp_var_name ("C"),
			  boolean_type_node);
	  DECL_ARTIFICIAL (first_var) = 1;
	  DECL_IGNORED_P (first_var) = 1;
	  TREE_STATIC (first_var) = 1;
	  TREE_THIS_VOLATILE (first_var) = 1;
	  TREE_USED (first_var) = 1;
	  DECL_INITIAL (first_var) = boolean_false_node;
	  varpool_node::add (first_var);

	  cond_var = create_tmp_var (boolean_type_node, "tmp_called");
	  assign = gimple_build_assign (cond_var, first_var);
	  gimplify_seq_add_stmt (&body, assign);

	  assign = gimple_build_assign (first_var, boolean_true_node);
	}
      else
	{
	  cond_var = NULL_TREE;
	  assign = NULL;
	}

      build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
				  cond_var, assign);

      /* If -finstrument-functions-once is specified, generate:

	   if (!tmp_called)
	     [call profiling exit function]

	 without specific protection for data races.  */
      build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
				  cond_var, NULL);

      gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
      gimplify_seq_add_stmt (&body, tf);
      gbind *new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }
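
  /* Illustrative sketch (not from the original sources): with
     -finstrument-functions-once the function body now looks roughly like

       tmp_called = C.0;
       if (!tmp_called) { C.0 = 1; __cyg_profile_func_enter (...); }
       try { <original body> }
       finally { if (!tmp_called) __cyg_profile_func_exit (...); }

     and without the -once variant the tmp_called guards are simply
     absent.  */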

  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
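
/* The result is effectively *(TYPE *)0 at offset zero: dead code, but
   with the right type and mode, so gimplification can continue.  */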

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type)) != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}
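
      /* For instance (illustrative), 'va_arg (ap, char)' produces:
	   warning: 'char' is promoted to 'int' when passed through '...'
	   note: (so you should pass 'int' not 'char' to 'va_arg')
	 because the char argument was already promoted at the call site.  */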

      /* We can, however, treat "undefined" any way we please.
	 Build a call to __builtin_trap to encourage the user to fix the
	 program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
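
/* Sketch of the lowering above, with illustrative names: a use such as

     x = va_arg (ap, int);

   is rewritten here into an internal-function call along the lines of

     x = .VA_ARG (<valist>, 0B, 0B);

   where the null 'int *' TAG operand encodes the requested type and APTAG
   the va_list type.  The call itself is expanded into target-specific code
   later, by the stdarg lowering machinery (tree-stdarg.cc), which is why
   PROP_gimple_lva is cleared here.  */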

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);

  return gimple_seq_last_stmt (*seq_p);
}
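
/* Typical usage, with illustrative variables:

     gimple_seq seq = NULL;
     gimplify_assign (dst, build2 (PLUS_EXPR, type, a, b), &seq);

   gimplifies the ungimplified sum as needed, appends the resulting
   assignment to SEQ, and returns the new GIMPLE_ASSIGN.  */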

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))