1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2025 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "stringpool.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
35 #include "tree-inline.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
48 #include "symtab-thunks.h"
51 /* Summary of nested functions. */
52 static function_summary
<nested_function_info
*>
53 *nested_function_sum
= NULL
;
55 /* Return nested_function_info, if available. */
56 nested_function_info
*
57 nested_function_info::get (cgraph_node
*node
)
59 if (!nested_function_sum
)
61 return nested_function_sum
->get (node
);
64 /* Return nested_function_info possibly creating new one. */
65 nested_function_info
*
66 nested_function_info::get_create (cgraph_node
*node
)
68 if (!nested_function_sum
)
70 nested_function_sum
= new function_summary
<nested_function_info
*>
72 nested_function_sum
->disable_insertion_hook ();
74 return nested_function_sum
->get_create (node
);
77 /* cgraph_node is no longer nested function; update cgraph accordingly. */
79 unnest_function (cgraph_node
*node
)
81 nested_function_info
*info
= nested_function_info::get (node
);
82 cgraph_node
**node2
= &nested_function_info::get
83 (nested_function_origin (node
))->nested
;
85 gcc_checking_assert (info
->origin
);
86 while (*node2
!= node
)
87 node2
= &nested_function_info::get (*node2
)->next_nested
;
88 *node2
= info
->next_nested
;
89 info
->next_nested
= NULL
;
91 nested_function_sum
->remove (node
);
94 /* Destructor: unlink function from nested function lists. */
95 nested_function_info::~nested_function_info ()
98 for (cgraph_node
*n
= nested
; n
; n
= next
)
100 nested_function_info
*info
= nested_function_info::get (n
);
101 next
= info
->next_nested
;
103 info
->next_nested
= NULL
;
109 = &nested_function_info::get (origin
)->nested
;
111 nested_function_info
*info
;
112 while ((info
= nested_function_info::get (*node2
)) != this && info
)
113 node2
= &info
->next_nested
;
114 *node2
= next_nested
;
118 /* Free nested function info summaries. */
120 nested_function_info::release ()
122 if (nested_function_sum
)
123 delete (nested_function_sum
);
124 nested_function_sum
= NULL
;
127 /* If NODE is nested function, record it. */
129 maybe_record_nested_function (cgraph_node
*node
)
131 /* All nested functions gets lowered during the construction of symtab. */
132 if (symtab
->state
> CONSTRUCTION
)
134 if (DECL_CONTEXT (node
->decl
)
135 && TREE_CODE (DECL_CONTEXT (node
->decl
)) == FUNCTION_DECL
)
137 cgraph_node
*origin
= cgraph_node::get_create (DECL_CONTEXT (node
->decl
));
138 nested_function_info
*info
= nested_function_info::get_create (node
);
139 nested_function_info
*origin_info
140 = nested_function_info::get_create (origin
);
142 info
->origin
= origin
;
143 info
->next_nested
= origin_info
->nested
;
144 origin_info
->nested
= node
;
148 /* The object of this pass is to lower the representation of a set of nested
149 functions in order to expose all of the gory details of the various
150 nonlocal references. We want to do this sooner rather than later, in
151 order to give us more freedom in emitting all of the functions in question.
153 Back in olden times, when gcc was young, we developed an insanely
154 complicated scheme whereby variables which were referenced nonlocally
155 were forced to live in the stack of the declaring function, and then
156 the nested functions magically discovered where these variables were
157 placed. In order for this scheme to function properly, it required
158 that the outer function be partially expanded, then we switch to
159 compiling the inner function, and once done with those we switch back
160 to compiling the outer function. Such delicate ordering requirements
161 makes it difficult to do whole translation unit optimizations
162 involving such functions.
164 The implementation here is much more direct. Everything that can be
165 referenced by an inner function is a member of an explicitly created
166 structure herein called the "nonlocal frame struct". The incoming
167 static chain for a nested function is a pointer to this struct in
168 the parent. In this way, we settle on known offsets from a known
169 base, and so are decoupled from the logic that places objects in the
170 function's stack frame. More importantly, we don't have to wait for
171 that to happen -- since the compilation of the inner function is no
172 longer tied to a real stack frame, the nonlocal frame struct can be
173 allocated anywhere. Which means that the outer function is now
176 Theory of operation here is very simple. Iterate over all the
177 statements in all the functions (depth first) several times,
178 allocating structures and fields on demand. In general we want to
179 examine inner functions first, so that we can avoid making changes
180 to outer functions which are unnecessary.
182 The order of the passes matters a bit, in that later passes will be
183 skipped if it is discovered that the functions don't actually interact
184 at all. That is, they're nested in the lexical sense but could have
185 been written as independent functions without change. */
190 struct nesting_info
*outer
;
191 struct nesting_info
*inner
;
192 struct nesting_info
*next
;
194 hash_map
<tree
, tree
> *field_map
;
195 hash_map
<tree
, tree
> *var_map
;
196 hash_set
<tree
*> *mem_refs
;
197 bitmap suppress_expansion
;
200 tree new_local_var_chain
;
201 tree debug_var_chain
;
209 bool any_parm_remapped
;
210 bool any_tramp_created
;
211 bool any_descr_created
;
212 char static_chain_added
;
216 /* Iterate over the nesting tree, starting with ROOT, depth first. */
218 static inline struct nesting_info
*
219 iter_nestinfo_start (struct nesting_info
*root
)
226 static inline struct nesting_info
*
227 iter_nestinfo_next (struct nesting_info
*node
)
230 return iter_nestinfo_start (node
->next
);
234 #define FOR_EACH_NEST_INFO(I, ROOT) \
235 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
237 /* Obstack used for the bitmaps in the struct above. */
238 static struct bitmap_obstack nesting_info_bitmap_obstack
;
241 /* We're working in so many different function contexts simultaneously,
242 that create_tmp_var is dangerous. Prevent mishap. */
243 #define create_tmp_var cant_use_create_tmp_var_here_dummy
245 /* Like create_tmp_var, except record the variable for registration at
246 the given nesting level. */
249 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
253 /* If the type is of variable size or a type which must be created by the
254 frontend, something is wrong. Note that we explicitly allow
255 incomplete types here, since we create them ourselves here. */
256 gcc_assert (!TREE_ADDRESSABLE (type
));
257 gcc_assert (!TYPE_SIZE_UNIT (type
)
258 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
260 tmp_var
= create_tmp_var_raw (type
, prefix
);
261 DECL_CONTEXT (tmp_var
) = info
->context
;
262 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
263 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
265 info
->new_local_var_chain
= tmp_var
;
270 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
273 build_simple_mem_ref_notrap (tree ptr
)
275 tree t
= build_simple_mem_ref (ptr
);
276 TREE_THIS_NOTRAP (t
) = 1;
280 /* Take the address of EXP to be used within function CONTEXT.
281 Mark it for addressability as necessary. */
284 build_addr (tree exp
)
286 mark_addressable (exp
);
287 return build_fold_addr_expr (exp
);
290 /* Insert FIELD into TYPE, sorted by alignment requirements. */
293 insert_field_into_struct (tree type
, tree field
)
297 DECL_CONTEXT (field
) = type
;
299 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
300 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
303 DECL_CHAIN (field
) = *p
;
306 /* Set correct alignment for frame struct type. */
307 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
308 SET_TYPE_ALIGN (type
, DECL_ALIGN (field
));
311 /* Build or return the RECORD_TYPE that describes the frame state that is
312 shared between INFO->CONTEXT and its nested functions. This record will
313 not be complete until finalize_nesting_tree; up until that point we'll
314 be adding fields as necessary.
316 We also build the DECL that represents this frame in the function. */
319 get_frame_type (struct nesting_info
*info
)
321 tree type
= info
->frame_type
;
326 type
= make_node (RECORD_TYPE
);
328 name
= concat ("FRAME.",
329 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
331 TYPE_NAME (type
) = get_identifier (name
);
334 info
->frame_type
= type
;
336 /* Do not put info->frame_decl on info->new_local_var_chain,
337 so that we can declare it in the lexical blocks, which
338 makes sure virtual regs that end up appearing in its RTL
339 expression get substituted in instantiate_virtual_regs. */
340 info
->frame_decl
= create_tmp_var_raw (type
, "FRAME");
341 DECL_CONTEXT (info
->frame_decl
) = info
->context
;
342 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
343 DECL_SEEN_IN_BIND_EXPR_P (info
->frame_decl
) = 1;
345 /* ??? Always make it addressable for now, since it is meant to
346 be pointed to by the static chain pointer. This pessimizes
347 when it turns out that no static chains are needed because
348 the nested functions referencing non-local variables are not
349 reachable, but the true pessimization is to create the non-
350 local frame structure in the first place. */
351 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
357 /* Return true if DECL should be referenced by pointer in the non-local frame
361 use_pointer_in_frame (tree decl
)
363 if (TREE_CODE (decl
) == PARM_DECL
)
365 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
366 sized DECLs, and inefficient to copy large aggregates. Don't bother
367 moving anything but scalar parameters. */
368 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
372 /* Variable-sized DECLs can only come from OMP clauses at this point
373 since the gimplifier has already turned the regular variables into
374 pointers. Do the same as the gimplifier. */
375 return !DECL_SIZE (decl
) || TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
;
379 /* Given DECL, a non-locally accessed variable, find or create a field
380 in the non-local frame structure for the given nesting context. */
383 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
384 enum insert_option insert
)
386 gcc_checking_assert (decl_function_context (decl
) == info
->context
);
388 if (insert
== NO_INSERT
)
390 tree
*slot
= info
->field_map
->get (decl
);
391 return slot
? *slot
: NULL_TREE
;
394 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
397 tree type
= get_frame_type (info
);
398 tree field
= make_node (FIELD_DECL
);
399 DECL_NAME (field
) = DECL_NAME (decl
);
401 if (use_pointer_in_frame (decl
))
403 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
404 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
405 DECL_NONADDRESSABLE_P (field
) = 1;
409 TREE_TYPE (field
) = TREE_TYPE (decl
);
410 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
411 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
412 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
413 DECL_IGNORED_P (field
) = DECL_IGNORED_P (decl
);
414 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
415 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
416 copy_warning (field
, decl
);
418 /* Declare the transformation and adjust the original DECL. For a
419 variable or for a parameter when not optimizing, we make it point
420 to the field in the frame directly. For a parameter, we don't do
421 it when optimizing because the variable tracking pass will already
423 if (VAR_P (decl
) || !optimize
)
426 = build3 (COMPONENT_REF
, TREE_TYPE (field
), info
->frame_decl
,
429 /* If the next declaration is a PARM_DECL pointing to the DECL,
430 we need to adjust its VALUE_EXPR directly, since chains of
431 VALUE_EXPRs run afoul of garbage collection. This occurs
432 in Ada for Out parameters that aren't copied in. */
433 tree next
= DECL_CHAIN (decl
);
435 && TREE_CODE (next
) == PARM_DECL
436 && DECL_HAS_VALUE_EXPR_P (next
)
437 && DECL_VALUE_EXPR (next
) == decl
)
438 SET_DECL_VALUE_EXPR (next
, x
);
440 SET_DECL_VALUE_EXPR (decl
, x
);
441 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
445 insert_field_into_struct (type
, field
);
448 if (TREE_CODE (decl
) == PARM_DECL
)
449 info
->any_parm_remapped
= true;
455 /* Build or return the variable that holds the static chain within
456 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
459 get_chain_decl (struct nesting_info
*info
)
461 tree decl
= info
->chain_decl
;
467 type
= get_frame_type (info
->outer
);
468 type
= build_pointer_type (type
);
470 /* Note that this variable is *not* entered into any BIND_EXPR;
471 the construction of this variable is handled specially in
472 expand_function_start and initialize_inlined_parameters.
473 Note also that it's represented as a parameter. This is more
474 close to the truth, since the initial value does come from
476 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
477 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
478 DECL_ARTIFICIAL (decl
) = 1;
479 DECL_IGNORED_P (decl
) = 1;
480 TREE_USED (decl
) = 1;
481 DECL_CONTEXT (decl
) = info
->context
;
482 DECL_ARG_TYPE (decl
) = type
;
484 /* Tell tree-inline.cc that we never write to this variable, so
485 it can copy-prop the replacement value immediately. */
486 TREE_READONLY (decl
) = 1;
488 info
->chain_decl
= decl
;
491 && (dump_flags
& TDF_DETAILS
)
492 && !DECL_STATIC_CHAIN (info
->context
))
493 fprintf (dump_file
, "Setting static-chain for %s\n",
494 lang_hooks
.decl_printable_name (info
->context
, 2));
496 DECL_STATIC_CHAIN (info
->context
) = 1;
501 /* Build or return the field within the non-local frame state that holds
502 the static chain for INFO->CONTEXT. This is the way to walk back up
503 multiple nesting levels. */
506 get_chain_field (struct nesting_info
*info
)
508 tree field
= info
->chain_field
;
512 tree type
= build_pointer_type (get_frame_type (info
->outer
));
514 field
= make_node (FIELD_DECL
);
515 DECL_NAME (field
) = get_identifier ("__chain");
516 TREE_TYPE (field
) = type
;
517 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
518 DECL_NONADDRESSABLE_P (field
) = 1;
520 insert_field_into_struct (get_frame_type (info
), field
);
522 info
->chain_field
= field
;
525 && (dump_flags
& TDF_DETAILS
)
526 && !DECL_STATIC_CHAIN (info
->context
))
527 fprintf (dump_file
, "Setting static-chain for %s\n",
528 lang_hooks
.decl_printable_name (info
->context
, 2));
530 DECL_STATIC_CHAIN (info
->context
) = 1;
535 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
538 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
543 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
544 gimple_call_set_lhs (call
, t
);
545 if (! gsi_end_p (*gsi
))
546 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
547 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
553 /* Copy EXP into a temporary. Allocate the temporary in the context of
554 INFO and insert the initialization statement before GSI. */
557 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
562 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
563 stmt
= gimple_build_assign (t
, exp
);
564 if (! gsi_end_p (*gsi
))
565 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
566 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
572 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
575 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
576 gimple_stmt_iterator
*gsi
)
578 if (is_gimple_val (exp
))
581 return init_tmp_var (info
, exp
, gsi
);
584 /* Similarly, but copy from the temporary and insert the statement
585 after the iterator. */
588 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
593 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
594 stmt
= gimple_build_assign (exp
, t
);
595 if (! gsi_end_p (*gsi
))
596 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
597 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
602 /* Build or return the type used to represent a nested function trampoline. */
604 static GTY(()) tree trampoline_type
;
607 get_trampoline_type (struct nesting_info
*info
)
609 unsigned align
, size
;
613 return trampoline_type
;
615 /* When trampolines are created off-stack then the only thing we need in the
616 local frame is a single pointer. */
617 if (flag_trampoline_impl
== TRAMPOLINE_IMPL_HEAP
)
619 trampoline_type
= build_pointer_type (void_type_node
);
620 return trampoline_type
;
623 align
= TRAMPOLINE_ALIGNMENT
;
624 size
= TRAMPOLINE_SIZE
;
626 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
627 then allocate extra space so that we can do dynamic alignment. */
628 if (align
> STACK_BOUNDARY
)
630 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
631 align
= STACK_BOUNDARY
;
634 t
= build_index_type (size_int (size
- 1));
635 t
= build_array_type (char_type_node
, t
);
636 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
637 FIELD_DECL
, get_identifier ("__data"), t
);
638 SET_DECL_ALIGN (t
, align
);
639 DECL_USER_ALIGN (t
) = 1;
641 trampoline_type
= make_node (RECORD_TYPE
);
642 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
643 TYPE_FIELDS (trampoline_type
) = t
;
644 layout_type (trampoline_type
);
645 DECL_CONTEXT (t
) = trampoline_type
;
647 return trampoline_type
;
650 /* Build or return the type used to represent a nested function descriptor. */
652 static GTY(()) tree descriptor_type
;
655 get_descriptor_type (struct nesting_info
*info
)
657 /* The base alignment is that of a function. */
658 const unsigned align
= FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
);
662 return descriptor_type
;
664 t
= build_index_type (integer_one_node
);
665 t
= build_array_type (ptr_type_node
, t
);
666 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
667 FIELD_DECL
, get_identifier ("__data"), t
);
668 SET_DECL_ALIGN (t
, MAX (TYPE_ALIGN (ptr_type_node
), align
));
669 DECL_USER_ALIGN (t
) = 1;
671 descriptor_type
= make_node (RECORD_TYPE
);
672 TYPE_NAME (descriptor_type
) = get_identifier ("__builtin_descriptor");
673 TYPE_FIELDS (descriptor_type
) = t
;
674 layout_type (descriptor_type
);
675 DECL_CONTEXT (t
) = descriptor_type
;
677 return descriptor_type
;
680 /* Given DECL, a nested function, find or create an element in the
681 var map for this function. */
684 lookup_element_for_decl (struct nesting_info
*info
, tree decl
,
685 enum insert_option insert
)
687 if (insert
== NO_INSERT
)
689 tree
*slot
= info
->var_map
->get (decl
);
690 return slot
? *slot
: NULL_TREE
;
693 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
695 *slot
= build_tree_list (NULL_TREE
, NULL_TREE
);
700 /* Given DECL, a nested function, create a field in the non-local
701 frame structure for this function. */
704 create_field_for_decl (struct nesting_info
*info
, tree decl
, tree type
)
706 tree field
= make_node (FIELD_DECL
);
707 DECL_NAME (field
) = DECL_NAME (decl
);
708 TREE_TYPE (field
) = type
;
709 TREE_ADDRESSABLE (field
) = 1;
710 insert_field_into_struct (get_frame_type (info
), field
);
714 /* Given DECL, a nested function, find or create a field in the non-local
715 frame structure for a trampoline for this function. */
718 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
719 enum insert_option insert
)
723 elt
= lookup_element_for_decl (info
, decl
, insert
);
727 field
= TREE_PURPOSE (elt
);
729 if (!field
&& insert
== INSERT
)
731 field
= create_field_for_decl (info
, decl
, get_trampoline_type (info
));
732 TREE_PURPOSE (elt
) = field
;
733 info
->any_tramp_created
= true;
739 /* Given DECL, a nested function, find or create a field in the non-local
740 frame structure for a descriptor for this function. */
743 lookup_descr_for_decl (struct nesting_info
*info
, tree decl
,
744 enum insert_option insert
)
748 elt
= lookup_element_for_decl (info
, decl
, insert
);
752 field
= TREE_VALUE (elt
);
754 if (!field
&& insert
== INSERT
)
756 field
= create_field_for_decl (info
, decl
, get_descriptor_type (info
));
757 TREE_VALUE (elt
) = field
;
758 info
->any_descr_created
= true;
764 /* Build or return the field within the non-local frame state that holds
765 the non-local goto "jmp_buf". The buffer itself is maintained by the
766 rtl middle-end as dynamic stack space is allocated. */
769 get_nl_goto_field (struct nesting_info
*info
)
771 tree field
= info
->nl_goto_field
;
777 /* For __builtin_nonlocal_goto, we need N words. The first is the
778 frame pointer, the rest is for the target's stack pointer save
779 area. The number of words is controlled by STACK_SAVEAREA_MODE;
780 not the best interface, but it'll do for now. */
781 if (Pmode
== ptr_mode
)
782 type
= ptr_type_node
;
784 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
787 = as_a
<fixed_size_mode
> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
788 size
= GET_MODE_SIZE (mode
);
789 size
= size
/ GET_MODE_SIZE (Pmode
);
792 type
= build_array_type
793 (type
, build_index_type (size_int (size
)));
795 field
= make_node (FIELD_DECL
);
796 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
797 TREE_TYPE (field
) = type
;
798 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
799 TREE_ADDRESSABLE (field
) = 1;
801 insert_field_into_struct (get_frame_type (info
), field
);
803 info
->nl_goto_field
= field
;
809 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
812 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
813 struct nesting_info
*info
, gimple_seq
*pseq
)
815 struct walk_stmt_info wi
;
817 memset (&wi
, 0, sizeof (wi
));
820 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
824 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
827 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
828 struct nesting_info
*info
)
830 gimple_seq body
= gimple_body (info
->context
);
831 walk_body (callback_stmt
, callback_op
, info
, &body
);
832 gimple_set_body (info
->context
, body
);
835 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
838 walk_gimple_omp_for (gomp_for
*for_stmt
,
839 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
840 struct nesting_info
*info
)
842 struct walk_stmt_info wi
;
847 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
850 memset (&wi
, 0, sizeof (wi
));
852 wi
.gsi
= gsi_last (seq
);
854 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
857 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
861 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
866 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
869 t
= gimple_omp_for_incr (for_stmt
, i
);
870 gcc_assert (BINARY_CLASS_P (t
));
872 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
875 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
878 seq
= gsi_seq (wi
.gsi
);
879 if (!gimple_seq_empty_p (seq
))
881 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
882 annotate_all_with_location (seq
, gimple_location (for_stmt
));
883 gimple_seq_add_seq (&pre_body
, seq
);
884 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
888 /* Similarly for ROOT and all functions nested underneath, depth first. */
891 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
892 struct nesting_info
*root
)
894 struct nesting_info
*n
;
895 FOR_EACH_NEST_INFO (n
, root
)
896 walk_function (callback_stmt
, callback_op
, n
);
900 /* We have to check for a fairly pathological case. The operands of function
901 nested function are to be interpreted in the context of the enclosing
902 function. So if any are variably-sized, they will get remapped when the
903 enclosing function is inlined. But that remapping would also have to be
904 done in the types of the PARM_DECLs of the nested function, meaning the
905 argument types of that function will disagree with the arguments in the
906 calls to that function. So we'd either have to make a copy of the nested
907 function corresponding to each time the enclosing function was inlined or
908 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
909 function. The former is not practical. The latter would still require
910 detecting this case to know when to add the conversions. So, for now at
911 least, we don't inline such an enclosing function. A similar issue
912 applies if the nested function has a variably modified return type, and
913 is not inlined, but the enclosing function is inlined and so the type of
914 the return slot as used in the enclosing function is remapped, so also
915 avoid inlining in that case.
917 We have to do that check recursively, so here return indicating whether
918 FNDECL has such a nested function. ORIG_FN is the function we were
919 trying to inline to use for checking whether any argument is variably
920 modified by anything in it.
922 It would be better to do this in tree-inline.cc so that we could give
923 the appropriate warning for why a function can't be inlined, but that's
924 too late since the nesting structure has already been flattened and
925 adding a flag just to record this fact seems a waste of a flag. */
928 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
930 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
933 for (cgn
= first_nested_function (cgn
); cgn
;
934 cgn
= next_nested_function (cgn
))
936 if (variably_modified_type_p (TREE_TYPE (TREE_TYPE (cgn
->decl
)),
939 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
940 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
943 if (check_for_nested_with_variably_modified (cgn
->decl
,
951 /* Construct our local datastructure describing the function nesting
952 tree rooted by CGN. */
954 static struct nesting_info
*
955 create_nesting_tree (struct cgraph_node
*cgn
)
957 struct nesting_info
*info
= XCNEW (struct nesting_info
);
958 info
->field_map
= new hash_map
<tree
, tree
>;
959 info
->var_map
= new hash_map
<tree
, tree
>;
960 info
->mem_refs
= new hash_set
<tree
*>;
961 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
962 info
->context
= cgn
->decl
;
963 info
->thunk_p
= cgn
->thunk
;
965 for (cgn
= first_nested_function (cgn
); cgn
;
966 cgn
= next_nested_function (cgn
))
968 struct nesting_info
*sub
= create_nesting_tree (cgn
);
970 sub
->next
= info
->inner
;
974 /* See discussion at check_for_nested_with_variably_modified for a
975 discussion of why this has to be here. */
976 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
978 DECL_UNINLINABLE (info
->context
) = true;
979 tree attrs
= DECL_ATTRIBUTES (info
->context
);
980 if (lookup_attribute ("noclone", attrs
) == NULL
)
981 DECL_ATTRIBUTES (info
->context
)
982 = tree_cons (get_identifier ("noclone"), NULL
, attrs
);
988 /* Return an expression computing the static chain for TARGET_CONTEXT
989 from INFO->CONTEXT. Insert any necessary computations before TSI. */
992 get_static_chain (struct nesting_info
*info
, tree target_context
,
993 gimple_stmt_iterator
*gsi
)
995 struct nesting_info
*i
;
998 if (info
->context
== target_context
)
1000 x
= build_addr (info
->frame_decl
);
1001 info
->static_chain_added
|= 1;
1005 x
= get_chain_decl (info
);
1006 info
->static_chain_added
|= 2;
1008 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
1010 tree field
= get_chain_field (i
);
1012 x
= build_simple_mem_ref_notrap (x
);
1013 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1014 x
= init_tmp_var (info
, x
, gsi
);
1022 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
1023 frame as seen from INFO->CONTEXT. Insert any necessary computations
1027 get_frame_field (struct nesting_info
*info
, tree target_context
,
1028 tree field
, gimple_stmt_iterator
*gsi
)
1030 struct nesting_info
*i
;
1033 if (info
->context
== target_context
)
1035 /* Make sure frame_decl gets created. */
1036 (void) get_frame_type (info
);
1037 x
= info
->frame_decl
;
1038 info
->static_chain_added
|= 1;
1042 x
= get_chain_decl (info
);
1043 info
->static_chain_added
|= 2;
1045 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
1047 tree field
= get_chain_field (i
);
1049 x
= build_simple_mem_ref_notrap (x
);
1050 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1051 x
= init_tmp_var (info
, x
, gsi
);
1054 x
= build_simple_mem_ref_notrap (x
);
1057 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1058 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (field
);
1062 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
1064 /* Helper for get_nonlocal_debug_decl and get_local_debug_decl. */
1067 get_debug_decl (tree decl
)
1070 = build_decl (DECL_SOURCE_LOCATION (decl
),
1071 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1072 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1073 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1074 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1075 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1076 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1077 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1078 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1079 if ((TREE_CODE (decl
) == PARM_DECL
1080 || TREE_CODE (decl
) == RESULT_DECL
1082 && DECL_BY_REFERENCE (decl
))
1083 DECL_BY_REFERENCE (new_decl
) = 1;
1084 /* Copy DECL_LANG_SPECIFIC and DECL_LANG_FLAG_* for OpenMP langhook
1086 DECL_LANG_SPECIFIC (new_decl
) = DECL_LANG_SPECIFIC (decl
);
1087 #define COPY_DLF(n) DECL_LANG_FLAG_##n (new_decl) = DECL_LANG_FLAG_##n (decl)
1088 COPY_DLF (0); COPY_DLF (1); COPY_DLF (2); COPY_DLF (3);
1089 COPY_DLF (4); COPY_DLF (5); COPY_DLF (6); COPY_DLF (7);
1095 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
1096 in the nested function with DECL_VALUE_EXPR set to reference the true
1097 variable in the parent function. This is used both for debug info
1098 and in OMP lowering. */
1101 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
1103 tree target_context
;
1104 struct nesting_info
*i
;
1105 tree x
, field
, new_decl
;
1107 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1112 target_context
= decl_function_context (decl
);
1114 /* A copy of the code in get_frame_field, but without the temporaries. */
1115 if (info
->context
== target_context
)
1117 /* Make sure frame_decl gets created. */
1118 (void) get_frame_type (info
);
1119 x
= info
->frame_decl
;
1121 info
->static_chain_added
|= 1;
1125 x
= get_chain_decl (info
);
1126 info
->static_chain_added
|= 2;
1127 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
1129 field
= get_chain_field (i
);
1130 x
= build_simple_mem_ref_notrap (x
);
1131 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1133 x
= build_simple_mem_ref_notrap (x
);
1136 field
= lookup_field_for_decl (i
, decl
, INSERT
);
1137 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1138 if (use_pointer_in_frame (decl
))
1139 x
= build_simple_mem_ref_notrap (x
);
1141 /* ??? We should be remapping types as well, surely. */
1142 new_decl
= get_debug_decl (decl
);
1143 DECL_CONTEXT (new_decl
) = info
->context
;
1145 SET_DECL_VALUE_EXPR (new_decl
, x
);
1146 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1149 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1150 info
->debug_var_chain
= new_decl
;
1153 && info
->context
!= target_context
1154 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
1155 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
1161 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1162 and PARM_DECLs that belong to outer functions.
1164 The rewrite will involve some number of structure accesses back up
1165 the static chain. E.g. for a variable FOO up one nesting level it'll
1166 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1167 indirections apply to decls for which use_pointer_in_frame is true. */
1170 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1172 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1173 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1177 switch (TREE_CODE (t
))
1180 /* Non-automatic variables are never processed. */
1181 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1187 tree x
, target_context
= decl_function_context (t
);
1189 if (info
->context
== target_context
)
1194 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1195 x
= get_nonlocal_debug_decl (info
, t
);
1198 struct nesting_info
*i
= info
;
1199 while (i
&& i
->context
!= target_context
)
1201 /* If none of the outer contexts is the target context, this means
1202 that the VAR or PARM_DECL is referenced in a wrong context. */
1204 internal_error ("%s from %s referenced in %s",
1205 IDENTIFIER_POINTER (DECL_NAME (t
)),
1206 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
1207 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
1209 x
= lookup_field_for_decl (i
, t
, INSERT
);
1210 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1211 if (use_pointer_in_frame (t
))
1213 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1214 x
= build_simple_mem_ref_notrap (x
);
1221 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1223 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1231 /* We're taking the address of a label from a parent function, but
1232 this is not itself a non-local goto. Mark the label such that it
1233 will not be deleted, much as we would with a label address in
1235 if (decl_function_context (t
) != info
->context
)
1236 FORCED_LABEL (t
) = 1;
1241 bool save_val_only
= wi
->val_only
;
1243 wi
->val_only
= false;
1245 wi
->changed
= false;
1246 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
1247 wi
->val_only
= true;
1253 /* If we changed anything, we might no longer be directly
1254 referencing a decl. */
1255 save_context
= current_function_decl
;
1256 current_function_decl
= info
->context
;
1257 recompute_tree_invariant_for_addr_expr (t
);
1259 /* If the callback converted the address argument in a context
1260 where we only accept variables (and min_invariant, presumably),
1261 then compute the address into a temporary. */
1263 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1265 current_function_decl
= save_context
;
1274 case ARRAY_RANGE_REF
:
1276 /* Go down this entire nest and just look at the final prefix and
1277 anything that describes the references. Otherwise, we lose track
1278 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1279 wi
->val_only
= true;
1281 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1283 if (TREE_CODE (t
) == COMPONENT_REF
)
1284 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1286 else if (TREE_CODE (t
) == ARRAY_REF
1287 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1289 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1291 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1293 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1297 wi
->val_only
= false;
1298 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1301 case VIEW_CONVERT_EXPR
:
1302 /* Just request to look at the subtrees, leaving val_only and lhs
1303 untouched. This might actually be for !val_only + lhs, in which
1304 case we don't want to force a replacement by a temporary. */
1309 if (!IS_TYPE_OR_DECL_P (t
))
1312 wi
->val_only
= true;
1321 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1322 struct walk_stmt_info
*);
1324 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1325 and PARM_DECLs that belong to outer functions. */
1328 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1330 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1331 bool need_chain
= false, need_stmts
= false;
1332 tree clause
, decl
, *pdecl
;
1334 bitmap new_suppress
;
1336 new_suppress
= BITMAP_GGC_ALLOC ();
1337 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1339 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1342 switch (OMP_CLAUSE_CODE (clause
))
1344 case OMP_CLAUSE_REDUCTION
:
1345 case OMP_CLAUSE_IN_REDUCTION
:
1346 case OMP_CLAUSE_TASK_REDUCTION
:
1347 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1349 if (TREE_CODE (OMP_CLAUSE_DECL (clause
)) == MEM_REF
)
1351 pdecl
= &TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0);
1352 if (TREE_CODE (*pdecl
) == POINTER_PLUS_EXPR
)
1353 pdecl
= &TREE_OPERAND (*pdecl
, 0);
1354 if (INDIRECT_REF_P (*pdecl
)
1355 || TREE_CODE (*pdecl
) == ADDR_EXPR
)
1356 pdecl
= &TREE_OPERAND (*pdecl
, 0);
1358 goto do_decl_clause
;
1360 case OMP_CLAUSE_LASTPRIVATE
:
1361 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1363 goto do_decl_clause
;
1365 case OMP_CLAUSE_LINEAR
:
1366 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1368 wi
->val_only
= true;
1370 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1372 goto do_decl_clause
;
1374 case OMP_CLAUSE_PRIVATE
:
1375 case OMP_CLAUSE_FIRSTPRIVATE
:
1376 case OMP_CLAUSE_COPYPRIVATE
:
1377 case OMP_CLAUSE_SHARED
:
1378 case OMP_CLAUSE_ENTER
:
1379 case OMP_CLAUSE_LINK
:
1380 case OMP_CLAUSE_USE_DEVICE_PTR
:
1381 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1382 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1383 case OMP_CLAUSE_IS_DEVICE_PTR
:
1384 case OMP_CLAUSE_DETACH
:
1387 pdecl
= &OMP_CLAUSE_DECL (clause
);
1390 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1392 if (decl_function_context (decl
) != info
->context
)
1394 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1395 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1396 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1397 *pdecl
= get_nonlocal_debug_decl (info
, decl
);
1398 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1403 case OMP_CLAUSE_SCHEDULE
:
1404 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1407 case OMP_CLAUSE_FINAL
:
1409 case OMP_CLAUSE_SELF
:
1410 case OMP_CLAUSE_NUM_THREADS
:
1411 case OMP_CLAUSE_DEPEND
:
1412 case OMP_CLAUSE_DOACROSS
:
1413 case OMP_CLAUSE_DEVICE
:
1414 case OMP_CLAUSE_NUM_TEAMS
:
1415 case OMP_CLAUSE_THREAD_LIMIT
:
1416 case OMP_CLAUSE_SAFELEN
:
1417 case OMP_CLAUSE_SIMDLEN
:
1418 case OMP_CLAUSE_PRIORITY
:
1419 case OMP_CLAUSE_GRAINSIZE
:
1420 case OMP_CLAUSE_NUM_TASKS
:
1421 case OMP_CLAUSE_HINT
:
1422 case OMP_CLAUSE_FILTER
:
1423 case OMP_CLAUSE_NUM_GANGS
:
1424 case OMP_CLAUSE_NUM_WORKERS
:
1425 case OMP_CLAUSE_VECTOR_LENGTH
:
1426 case OMP_CLAUSE_GANG
:
1427 case OMP_CLAUSE_WORKER
:
1428 case OMP_CLAUSE_VECTOR
:
1429 case OMP_CLAUSE_ASYNC
:
1430 case OMP_CLAUSE_WAIT
:
1431 /* Several OpenACC clauses have optional arguments. Check if they
1433 if (OMP_CLAUSE_OPERAND (clause
, 0))
1435 wi
->val_only
= true;
1437 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1441 /* The gang clause accepts two arguments. */
1442 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1443 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1445 wi
->val_only
= true;
1447 convert_nonlocal_reference_op
1448 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1452 case OMP_CLAUSE_DIST_SCHEDULE
:
1453 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1455 wi
->val_only
= true;
1457 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1462 case OMP_CLAUSE_MAP
:
1464 case OMP_CLAUSE_FROM
:
1465 if (OMP_CLAUSE_SIZE (clause
))
1467 wi
->val_only
= true;
1469 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1472 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1473 goto do_decl_clause
;
1474 wi
->val_only
= true;
1476 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1480 case OMP_CLAUSE_ALIGNED
:
1481 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1483 wi
->val_only
= true;
1485 convert_nonlocal_reference_op
1486 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1489 case OMP_CLAUSE_NONTEMPORAL
:
1490 do_decl_clause_no_supp
:
1491 /* Like do_decl_clause, but don't add any suppression. */
1492 decl
= OMP_CLAUSE_DECL (clause
);
1494 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1496 if (decl_function_context (decl
) != info
->context
)
1498 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1503 case OMP_CLAUSE_ALLOCATE
:
1504 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause
))
1506 wi
->val_only
= true;
1508 convert_nonlocal_reference_op
1509 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause
), &dummy
, wi
);
1511 goto do_decl_clause_no_supp
;
1513 case OMP_CLAUSE_NOWAIT
:
1514 case OMP_CLAUSE_ORDERED
:
1515 case OMP_CLAUSE_DEFAULT
:
1516 case OMP_CLAUSE_COPYIN
:
1517 case OMP_CLAUSE_COLLAPSE
:
1518 case OMP_CLAUSE_TILE
:
1519 case OMP_CLAUSE_UNTIED
:
1520 case OMP_CLAUSE_MERGEABLE
:
1521 case OMP_CLAUSE_PROC_BIND
:
1522 case OMP_CLAUSE_NOGROUP
:
1523 case OMP_CLAUSE_THREADS
:
1524 case OMP_CLAUSE_SIMD
:
1525 case OMP_CLAUSE_DEFAULTMAP
:
1526 case OMP_CLAUSE_ORDER
:
1527 case OMP_CLAUSE_SEQ
:
1528 case OMP_CLAUSE_INDEPENDENT
:
1529 case OMP_CLAUSE_AUTO
:
1530 case OMP_CLAUSE_IF_PRESENT
:
1531 case OMP_CLAUSE_FINALIZE
:
1532 case OMP_CLAUSE_BIND
:
1533 case OMP_CLAUSE__CONDTEMP_
:
1534 case OMP_CLAUSE__SCANTEMP_
:
1537 /* The following clause belongs to the OpenACC cache directive, which
1538 is discarded during gimplification. */
1539 case OMP_CLAUSE__CACHE_
:
1540 /* The following clauses are only allowed in the OpenMP declare simd
1541 directive, so not seen here. */
1542 case OMP_CLAUSE_UNIFORM
:
1543 case OMP_CLAUSE_INBRANCH
:
1544 case OMP_CLAUSE_NOTINBRANCH
:
1545 /* The following clauses are only allowed on OpenMP cancel and
1546 cancellation point directives, which at this point have already
1547 been lowered into a function call. */
1548 case OMP_CLAUSE_FOR
:
1549 case OMP_CLAUSE_PARALLEL
:
1550 case OMP_CLAUSE_SECTIONS
:
1551 case OMP_CLAUSE_TASKGROUP
:
1552 /* The following clauses are only added during OMP lowering; nested
1553 function decomposition happens before that. */
1554 case OMP_CLAUSE__LOOPTEMP_
:
1555 case OMP_CLAUSE__REDUCTEMP_
:
1556 case OMP_CLAUSE__SIMDUID_
:
1557 case OMP_CLAUSE__SIMT_
:
1558 /* The following clauses are only allowed on OpenACC 'routine'
1559 directives, not seen here. */
1560 case OMP_CLAUSE_NOHOST
:
1561 /* Anything else. */
1567 info
->suppress_expansion
= new_suppress
;
1570 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1571 switch (OMP_CLAUSE_CODE (clause
))
1573 case OMP_CLAUSE_REDUCTION
:
1574 case OMP_CLAUSE_IN_REDUCTION
:
1575 case OMP_CLAUSE_TASK_REDUCTION
:
1576 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1579 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1580 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1582 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1583 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1585 tree save_local_var_chain
= info
->new_local_var_chain
;
1586 info
->new_local_var_chain
= NULL
;
1587 gimple_seq
*seq
= &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
);
1588 walk_body (convert_nonlocal_reference_stmt
,
1589 convert_nonlocal_reference_op
, info
, seq
);
1590 if (info
->new_local_var_chain
)
1591 declare_vars (info
->new_local_var_chain
,
1592 gimple_seq_first_stmt (*seq
), false);
1593 info
->new_local_var_chain
= NULL
;
1594 seq
= &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
);
1595 walk_body (convert_nonlocal_reference_stmt
,
1596 convert_nonlocal_reference_op
, info
, seq
);
1597 if (info
->new_local_var_chain
)
1598 declare_vars (info
->new_local_var_chain
,
1599 gimple_seq_first_stmt (*seq
), false);
1600 info
->new_local_var_chain
= save_local_var_chain
;
1601 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1603 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1604 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1609 case OMP_CLAUSE_LASTPRIVATE
:
1610 case OMP_CLAUSE_LINEAR
:
1612 tree save_local_var_chain
= info
->new_local_var_chain
;
1613 info
->new_local_var_chain
= NULL
;
1615 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
)
1616 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
);
1618 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
);
1619 walk_body (convert_nonlocal_reference_stmt
,
1620 convert_nonlocal_reference_op
, info
, seq
);
1621 if (info
->new_local_var_chain
)
1623 gimple
*g
= gimple_seq_first_stmt (*seq
);
1624 if (gimple_code (g
) != GIMPLE_BIND
)
1626 g
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
1628 gimple_seq_add_stmt_without_update (seq
, g
);
1630 declare_vars (info
->new_local_var_chain
,
1631 gimple_seq_first_stmt (*seq
), false);
1633 info
->new_local_var_chain
= save_local_var_chain
;
1644 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1647 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1649 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1650 type
= TREE_TYPE (type
);
1652 if (TYPE_NAME (type
)
1653 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1654 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1655 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1657 while (POINTER_TYPE_P (type
)
1658 || VECTOR_TYPE_P (type
)
1659 || TREE_CODE (type
) == FUNCTION_TYPE
1660 || TREE_CODE (type
) == METHOD_TYPE
)
1661 type
= TREE_TYPE (type
);
1663 if (TREE_CODE (type
) == ARRAY_TYPE
)
1667 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1668 domain
= TYPE_DOMAIN (type
);
1671 t
= TYPE_MIN_VALUE (domain
);
1672 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1673 && decl_function_context (t
) != info
->context
)
1674 get_nonlocal_debug_decl (info
, t
);
1675 t
= TYPE_MAX_VALUE (domain
);
1676 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1677 && decl_function_context (t
) != info
->context
)
1678 get_nonlocal_debug_decl (info
, t
);
1683 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1684 PARM_DECLs that belong to outer functions. This handles statements
1685 that are not handled via the standard recursion done in
1686 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1687 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1688 operands of STMT have been handled by this function. */
1691 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1692 struct walk_stmt_info
*wi
)
1694 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1695 tree save_local_var_chain
;
1696 bitmap save_suppress
;
1697 gimple
*stmt
= gsi_stmt (*gsi
);
1699 switch (gimple_code (stmt
))
1702 /* Don't walk non-local gotos for now. */
1703 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1705 wi
->val_only
= true;
1707 *handled_ops_p
= false;
1712 case GIMPLE_OMP_TEAMS
:
1713 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
1715 save_suppress
= info
->suppress_expansion
;
1716 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
),
1718 walk_body (convert_nonlocal_reference_stmt
,
1719 convert_nonlocal_reference_op
, info
,
1720 gimple_omp_body_ptr (stmt
));
1721 info
->suppress_expansion
= save_suppress
;
1726 case GIMPLE_OMP_PARALLEL
:
1727 case GIMPLE_OMP_TASK
:
1728 save_suppress
= info
->suppress_expansion
;
1729 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1733 decl
= get_chain_decl (info
);
1734 c
= build_omp_clause (gimple_location (stmt
),
1735 OMP_CLAUSE_FIRSTPRIVATE
);
1736 OMP_CLAUSE_DECL (c
) = decl
;
1737 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1738 gimple_omp_taskreg_set_clauses (stmt
, c
);
1741 save_local_var_chain
= info
->new_local_var_chain
;
1742 info
->new_local_var_chain
= NULL
;
1744 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1745 info
, gimple_omp_body_ptr (stmt
));
1747 if (info
->new_local_var_chain
)
1748 declare_vars (info
->new_local_var_chain
,
1749 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1751 info
->new_local_var_chain
= save_local_var_chain
;
1752 info
->suppress_expansion
= save_suppress
;
1755 case GIMPLE_OMP_FOR
:
1756 save_suppress
= info
->suppress_expansion
;
1757 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1758 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1759 convert_nonlocal_reference_stmt
,
1760 convert_nonlocal_reference_op
, info
);
1761 walk_body (convert_nonlocal_reference_stmt
,
1762 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1763 info
->suppress_expansion
= save_suppress
;
1766 case GIMPLE_OMP_SECTIONS
:
1767 save_suppress
= info
->suppress_expansion
;
1768 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1769 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1770 info
, gimple_omp_body_ptr (stmt
));
1771 info
->suppress_expansion
= save_suppress
;
1774 case GIMPLE_OMP_SINGLE
:
1775 save_suppress
= info
->suppress_expansion
;
1776 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1777 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1778 info
, gimple_omp_body_ptr (stmt
));
1779 info
->suppress_expansion
= save_suppress
;
1782 case GIMPLE_OMP_SCOPE
:
1783 save_suppress
= info
->suppress_expansion
;
1784 convert_nonlocal_omp_clauses (gimple_omp_scope_clauses_ptr (stmt
), wi
);
1785 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1786 info
, gimple_omp_body_ptr (stmt
));
1787 info
->suppress_expansion
= save_suppress
;
1790 case GIMPLE_OMP_TASKGROUP
:
1791 save_suppress
= info
->suppress_expansion
;
1792 convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
1793 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1794 info
, gimple_omp_body_ptr (stmt
));
1795 info
->suppress_expansion
= save_suppress
;
1798 case GIMPLE_OMP_TARGET
:
1799 if (!is_gimple_omp_offloaded (stmt
))
1801 save_suppress
= info
->suppress_expansion
;
1802 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1804 info
->suppress_expansion
= save_suppress
;
1805 walk_body (convert_nonlocal_reference_stmt
,
1806 convert_nonlocal_reference_op
, info
,
1807 gimple_omp_body_ptr (stmt
));
1810 save_suppress
= info
->suppress_expansion
;
1811 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1815 decl
= get_chain_decl (info
);
1816 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1817 OMP_CLAUSE_DECL (c
) = decl
;
1818 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1819 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1820 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1821 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1824 save_local_var_chain
= info
->new_local_var_chain
;
1825 info
->new_local_var_chain
= NULL
;
1827 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1828 info
, gimple_omp_body_ptr (stmt
));
1830 if (info
->new_local_var_chain
)
1831 declare_vars (info
->new_local_var_chain
,
1832 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1834 info
->new_local_var_chain
= save_local_var_chain
;
1835 info
->suppress_expansion
= save_suppress
;
1838 case GIMPLE_OMP_SECTION
:
1839 case GIMPLE_OMP_STRUCTURED_BLOCK
:
1840 case GIMPLE_OMP_MASTER
:
1841 case GIMPLE_OMP_MASKED
:
1842 case GIMPLE_OMP_ORDERED
:
1843 case GIMPLE_OMP_SCAN
:
1844 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1845 info
, gimple_omp_body_ptr (stmt
));
1850 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1852 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1853 if (TREE_CODE (var
) == NAMELIST_DECL
)
1855 /* Adjust decls mentioned in NAMELIST_DECL. */
1856 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1860 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1863 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1865 if (decl_function_context (decl
) != info
->context
)
1866 CONSTRUCTOR_ELT (decls
, i
)->value
1867 = get_nonlocal_debug_decl (info
, decl
);
1871 *handled_ops_p
= false;
1875 wi
->val_only
= true;
1877 *handled_ops_p
= false;
1881 if (gimple_clobber_p (stmt
))
1883 tree lhs
= gimple_assign_lhs (stmt
);
1885 && !(TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
1886 && decl_function_context (lhs
) != info
->context
)
1888 gsi_replace (gsi
, gimple_build_nop (), true);
1892 *handled_ops_p
= false;
1896 /* For every other statement that we are not interested in
1897 handling here, let the walker traverse the operands. */
1898 *handled_ops_p
= false;
1902 /* We have handled all of STMT operands, no need to traverse the operands. */
1903 *handled_ops_p
= true;
1908 /* A subroutine of convert_local_reference. Create a local variable
1909 in the parent function with DECL_VALUE_EXPR set to reference the
1910 field in FRAME. This is used both for debug info and in OMP
1914 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1918 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1922 /* Make sure frame_decl gets created. */
1923 (void) get_frame_type (info
);
1924 x
= info
->frame_decl
;
1925 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1927 new_decl
= get_debug_decl (decl
);
1928 DECL_CONTEXT (new_decl
) = info
->context
;
1930 SET_DECL_VALUE_EXPR (new_decl
, x
);
1931 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1934 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1935 info
->debug_var_chain
= new_decl
;
1937 /* Do not emit debug info twice. */
1938 DECL_IGNORED_P (decl
) = 1;
1944 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1945 and PARM_DECLs that were referenced by inner nested functions.
1946 The rewrite will be a structure reference to the local frame variable. */
1948 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1951 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1953 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1954 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1955 tree t
= *tp
, field
, x
;
1959 switch (TREE_CODE (t
))
1962 /* Non-automatic variables are never processed. */
1963 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1968 if (t
!= info
->frame_decl
&& decl_function_context (t
) == info
->context
)
1970 /* If we copied a pointer to the frame, then the original decl
1971 is used unchanged in the parent function. */
1972 if (use_pointer_in_frame (t
))
1975 /* No need to transform anything if no child references the
1977 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1982 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1983 x
= get_local_debug_decl (info
, t
, field
);
1985 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1990 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1992 x
= init_tmp_var (info
, x
, &wi
->gsi
);
2000 save_val_only
= wi
->val_only
;
2001 wi
->val_only
= false;
2003 wi
->changed
= false;
2004 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
2005 wi
->val_only
= save_val_only
;
2007 /* If we converted anything ... */
2012 /* Then the frame decl is now addressable. */
2013 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
2015 save_context
= current_function_decl
;
2016 current_function_decl
= info
->context
;
2017 recompute_tree_invariant_for_addr_expr (t
);
2019 /* If we are in a context where we only accept values, then
2020 compute the address into a temporary. */
2022 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
2024 current_function_decl
= save_context
;
2032 case ARRAY_RANGE_REF
:
2034 /* Go down this entire nest and just look at the final prefix and
2035 anything that describes the references. Otherwise, we lose track
2036 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
2037 save_val_only
= wi
->val_only
;
2038 wi
->val_only
= true;
2040 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
2042 if (TREE_CODE (t
) == COMPONENT_REF
)
2043 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
2045 else if (TREE_CODE (t
) == ARRAY_REF
2046 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2048 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
2050 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
2052 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
2056 wi
->val_only
= false;
2057 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
2058 wi
->val_only
= save_val_only
;
2062 save_val_only
= wi
->val_only
;
2063 wi
->val_only
= true;
2065 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
2067 /* We need to re-fold the MEM_REF as component references as
2068 part of a ADDR_EXPR address are not allowed. But we cannot
2069 fold here, as the chain record type is not yet finalized. */
2070 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
2071 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
2072 info
->mem_refs
->add (tp
);
2073 wi
->val_only
= save_val_only
;
2076 case VIEW_CONVERT_EXPR
:
2077 /* Just request to look at the subtrees, leaving val_only and lhs
2078 untouched. This might actually be for !val_only + lhs, in which
2079 case we don't want to force a replacement by a temporary. */
2084 if (!IS_TYPE_OR_DECL_P (t
))
2087 wi
->val_only
= true;
2096 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
2097 struct walk_stmt_info
*);
2099 /* Helper for convert_local_reference. Convert all the references in
2100 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
2103 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
2105 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2106 bool need_frame
= false, need_stmts
= false;
2107 tree clause
, decl
, *pdecl
;
2109 bitmap new_suppress
;
2111 new_suppress
= BITMAP_GGC_ALLOC ();
2112 bitmap_copy (new_suppress
, info
->suppress_expansion
);
2114 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2117 switch (OMP_CLAUSE_CODE (clause
))
2119 case OMP_CLAUSE_REDUCTION
:
2120 case OMP_CLAUSE_IN_REDUCTION
:
2121 case OMP_CLAUSE_TASK_REDUCTION
:
2122 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2124 if (TREE_CODE (OMP_CLAUSE_DECL (clause
)) == MEM_REF
)
2126 pdecl
= &TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0);
2127 if (TREE_CODE (*pdecl
) == POINTER_PLUS_EXPR
)
2128 pdecl
= &TREE_OPERAND (*pdecl
, 0);
2129 if (INDIRECT_REF_P (*pdecl
)
2130 || TREE_CODE (*pdecl
) == ADDR_EXPR
)
2131 pdecl
= &TREE_OPERAND (*pdecl
, 0);
2133 goto do_decl_clause
;
2135 case OMP_CLAUSE_LASTPRIVATE
:
2136 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
2138 goto do_decl_clause
;
2140 case OMP_CLAUSE_LINEAR
:
2141 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
2143 wi
->val_only
= true;
2145 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
2147 goto do_decl_clause
;
2149 case OMP_CLAUSE_PRIVATE
:
2150 case OMP_CLAUSE_FIRSTPRIVATE
:
2151 case OMP_CLAUSE_COPYPRIVATE
:
2152 case OMP_CLAUSE_SHARED
:
2153 case OMP_CLAUSE_ENTER
:
2154 case OMP_CLAUSE_LINK
:
2155 case OMP_CLAUSE_USE_DEVICE_PTR
:
2156 case OMP_CLAUSE_USE_DEVICE_ADDR
:
2157 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
2158 case OMP_CLAUSE_IS_DEVICE_PTR
:
2159 case OMP_CLAUSE_DETACH
:
2162 pdecl
= &OMP_CLAUSE_DECL (clause
);
2165 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2167 if (decl_function_context (decl
) == info
->context
2168 && !use_pointer_in_frame (decl
))
2170 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2173 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
2174 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
2175 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
2176 *pdecl
= get_local_debug_decl (info
, decl
, field
);
2182 case OMP_CLAUSE_SCHEDULE
:
2183 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
2186 case OMP_CLAUSE_FINAL
:
2188 case OMP_CLAUSE_SELF
:
2189 case OMP_CLAUSE_NUM_THREADS
:
2190 case OMP_CLAUSE_DEPEND
:
2191 case OMP_CLAUSE_DOACROSS
:
2192 case OMP_CLAUSE_DEVICE
:
2193 case OMP_CLAUSE_NUM_TEAMS
:
2194 case OMP_CLAUSE_THREAD_LIMIT
:
2195 case OMP_CLAUSE_SAFELEN
:
2196 case OMP_CLAUSE_SIMDLEN
:
2197 case OMP_CLAUSE_PRIORITY
:
2198 case OMP_CLAUSE_GRAINSIZE
:
2199 case OMP_CLAUSE_NUM_TASKS
:
2200 case OMP_CLAUSE_HINT
:
2201 case OMP_CLAUSE_FILTER
:
2202 case OMP_CLAUSE_NUM_GANGS
:
2203 case OMP_CLAUSE_NUM_WORKERS
:
2204 case OMP_CLAUSE_VECTOR_LENGTH
:
2205 case OMP_CLAUSE_GANG
:
2206 case OMP_CLAUSE_WORKER
:
2207 case OMP_CLAUSE_VECTOR
:
2208 case OMP_CLAUSE_ASYNC
:
2209 case OMP_CLAUSE_WAIT
:
2210 /* Several OpenACC clauses have optional arguments. Check if they
2212 if (OMP_CLAUSE_OPERAND (clause
, 0))
2214 wi
->val_only
= true;
2216 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2220 /* The gang clause accepts two arguments. */
2221 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
2222 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
2224 wi
->val_only
= true;
2226 convert_nonlocal_reference_op
2227 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
2231 case OMP_CLAUSE_DIST_SCHEDULE
:
2232 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
2234 wi
->val_only
= true;
2236 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2241 case OMP_CLAUSE_MAP
:
2243 case OMP_CLAUSE_FROM
:
2244 if (OMP_CLAUSE_SIZE (clause
))
2246 wi
->val_only
= true;
2248 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
2251 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
2252 goto do_decl_clause
;
2253 wi
->val_only
= true;
2255 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
2259 case OMP_CLAUSE_ALIGNED
:
2260 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
2262 wi
->val_only
= true;
2264 convert_local_reference_op
2265 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
2268 case OMP_CLAUSE_NONTEMPORAL
:
2269 do_decl_clause_no_supp
:
2270 /* Like do_decl_clause, but don't add any suppression. */
2271 decl
= OMP_CLAUSE_DECL (clause
);
2273 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2275 if (decl_function_context (decl
) == info
->context
2276 && !use_pointer_in_frame (decl
))
2278 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2281 OMP_CLAUSE_DECL (clause
)
2282 = get_local_debug_decl (info
, decl
, field
);
2288 case OMP_CLAUSE_ALLOCATE
:
2289 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause
))
2291 wi
->val_only
= true;
2293 convert_local_reference_op
2294 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause
), &dummy
, wi
);
2296 goto do_decl_clause_no_supp
;
2298 case OMP_CLAUSE_NOWAIT
:
2299 case OMP_CLAUSE_ORDERED
:
2300 case OMP_CLAUSE_DEFAULT
:
2301 case OMP_CLAUSE_COPYIN
:
2302 case OMP_CLAUSE_COLLAPSE
:
2303 case OMP_CLAUSE_TILE
:
2304 case OMP_CLAUSE_UNTIED
:
2305 case OMP_CLAUSE_MERGEABLE
:
2306 case OMP_CLAUSE_PROC_BIND
:
2307 case OMP_CLAUSE_NOGROUP
:
2308 case OMP_CLAUSE_THREADS
:
2309 case OMP_CLAUSE_SIMD
:
2310 case OMP_CLAUSE_DEFAULTMAP
:
2311 case OMP_CLAUSE_ORDER
:
2312 case OMP_CLAUSE_SEQ
:
2313 case OMP_CLAUSE_INDEPENDENT
:
2314 case OMP_CLAUSE_AUTO
:
2315 case OMP_CLAUSE_IF_PRESENT
:
2316 case OMP_CLAUSE_FINALIZE
:
2317 case OMP_CLAUSE_BIND
:
2318 case OMP_CLAUSE__CONDTEMP_
:
2319 case OMP_CLAUSE__SCANTEMP_
:
2322 /* The following clause belongs to the OpenACC cache directive, which
2323 is discarded during gimplification. */
2324 case OMP_CLAUSE__CACHE_
:
2325 /* The following clauses are only allowed in the OpenMP declare simd
2326 directive, so not seen here. */
2327 case OMP_CLAUSE_UNIFORM
:
2328 case OMP_CLAUSE_INBRANCH
:
2329 case OMP_CLAUSE_NOTINBRANCH
:
2330 /* The following clauses are only allowed on OpenMP cancel and
2331 cancellation point directives, which at this point have already
2332 been lowered into a function call. */
2333 case OMP_CLAUSE_FOR
:
2334 case OMP_CLAUSE_PARALLEL
:
2335 case OMP_CLAUSE_SECTIONS
:
2336 case OMP_CLAUSE_TASKGROUP
:
2337 /* The following clauses are only added during OMP lowering; nested
2338 function decomposition happens before that. */
2339 case OMP_CLAUSE__LOOPTEMP_
:
2340 case OMP_CLAUSE__REDUCTEMP_
:
2341 case OMP_CLAUSE__SIMDUID_
:
2342 case OMP_CLAUSE__SIMT_
:
2343 /* The following clauses are only allowed on OpenACC 'routine'
2344 directives, not seen here. */
2345 case OMP_CLAUSE_NOHOST
:
2346 /* Anything else. */
2352 info
->suppress_expansion
= new_suppress
;
2355 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2356 switch (OMP_CLAUSE_CODE (clause
))
2358 case OMP_CLAUSE_REDUCTION
:
2359 case OMP_CLAUSE_IN_REDUCTION
:
2360 case OMP_CLAUSE_TASK_REDUCTION
:
2361 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2364 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
2365 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2367 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2368 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2370 walk_body (convert_local_reference_stmt
,
2371 convert_local_reference_op
, info
,
2372 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
2373 walk_body (convert_local_reference_stmt
,
2374 convert_local_reference_op
, info
,
2375 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
2376 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2378 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2379 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2384 case OMP_CLAUSE_LASTPRIVATE
:
2385 walk_body (convert_local_reference_stmt
,
2386 convert_local_reference_op
, info
,
2387 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
2390 case OMP_CLAUSE_LINEAR
:
2391 walk_body (convert_local_reference_stmt
,
2392 convert_local_reference_op
, info
,
2393 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
2404 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2405 and PARM_DECLs that were referenced by inner nested functions.
2406 The rewrite will be a structure reference to the local frame variable. */
2409 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2410 struct walk_stmt_info
*wi
)
2412 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2413 tree save_local_var_chain
;
2414 bitmap save_suppress
;
2415 char save_static_chain_added
;
2416 bool frame_decl_added
;
2417 gimple
*stmt
= gsi_stmt (*gsi
);
2419 switch (gimple_code (stmt
))
2421 case GIMPLE_OMP_TEAMS
:
2422 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2424 save_suppress
= info
->suppress_expansion
;
2425 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2426 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2427 info
, gimple_omp_body_ptr (stmt
));
2428 info
->suppress_expansion
= save_suppress
;
2433 case GIMPLE_OMP_PARALLEL
:
2434 case GIMPLE_OMP_TASK
:
2435 save_suppress
= info
->suppress_expansion
;
2436 frame_decl_added
= false;
2437 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
2440 tree c
= build_omp_clause (gimple_location (stmt
),
2442 (void) get_frame_type (info
);
2443 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2444 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2445 gimple_omp_taskreg_set_clauses (stmt
, c
);
2446 info
->static_chain_added
|= 4;
2447 frame_decl_added
= true;
2450 save_local_var_chain
= info
->new_local_var_chain
;
2451 save_static_chain_added
= info
->static_chain_added
;
2452 info
->new_local_var_chain
= NULL
;
2453 info
->static_chain_added
= 0;
2455 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2456 gimple_omp_body_ptr (stmt
));
2458 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2460 tree c
= build_omp_clause (gimple_location (stmt
),
2462 (void) get_frame_type (info
);
2463 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2464 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2465 info
->static_chain_added
|= 4;
2466 gimple_omp_taskreg_set_clauses (stmt
, c
);
2468 if (info
->new_local_var_chain
)
2469 declare_vars (info
->new_local_var_chain
,
2470 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2471 info
->new_local_var_chain
= save_local_var_chain
;
2472 info
->suppress_expansion
= save_suppress
;
2473 info
->static_chain_added
|= save_static_chain_added
;
2476 case GIMPLE_OMP_FOR
:
2477 save_suppress
= info
->suppress_expansion
;
2478 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
2479 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
2480 convert_local_reference_stmt
,
2481 convert_local_reference_op
, info
);
2482 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2483 info
, gimple_omp_body_ptr (stmt
));
2484 info
->suppress_expansion
= save_suppress
;
2487 case GIMPLE_OMP_SECTIONS
:
2488 save_suppress
= info
->suppress_expansion
;
2489 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
2490 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2491 info
, gimple_omp_body_ptr (stmt
));
2492 info
->suppress_expansion
= save_suppress
;
2495 case GIMPLE_OMP_SINGLE
:
2496 save_suppress
= info
->suppress_expansion
;
2497 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
2498 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2499 info
, gimple_omp_body_ptr (stmt
));
2500 info
->suppress_expansion
= save_suppress
;
2503 case GIMPLE_OMP_SCOPE
:
2504 save_suppress
= info
->suppress_expansion
;
2505 convert_local_omp_clauses (gimple_omp_scope_clauses_ptr (stmt
), wi
);
2506 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2507 info
, gimple_omp_body_ptr (stmt
));
2508 info
->suppress_expansion
= save_suppress
;
2511 case GIMPLE_OMP_TASKGROUP
:
2512 save_suppress
= info
->suppress_expansion
;
2513 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
2514 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2515 info
, gimple_omp_body_ptr (stmt
));
2516 info
->suppress_expansion
= save_suppress
;
2519 case GIMPLE_OMP_TARGET
:
2520 if (!is_gimple_omp_offloaded (stmt
))
2522 save_suppress
= info
->suppress_expansion
;
2523 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
2524 info
->suppress_expansion
= save_suppress
;
2525 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2526 info
, gimple_omp_body_ptr (stmt
));
2529 save_suppress
= info
->suppress_expansion
;
2530 frame_decl_added
= false;
2531 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2533 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2534 (void) get_frame_type (info
);
2535 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2536 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2537 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2538 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2539 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2540 info
->static_chain_added
|= 4;
2541 frame_decl_added
= true;
2544 save_local_var_chain
= info
->new_local_var_chain
;
2545 save_static_chain_added
= info
->static_chain_added
;
2546 info
->new_local_var_chain
= NULL
;
2547 info
->static_chain_added
= 0;
2549 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2550 gimple_omp_body_ptr (stmt
));
2552 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2554 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2555 (void) get_frame_type (info
);
2556 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2557 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2558 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2559 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2560 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2561 info
->static_chain_added
|= 4;
2564 if (info
->new_local_var_chain
)
2565 declare_vars (info
->new_local_var_chain
,
2566 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2567 info
->new_local_var_chain
= save_local_var_chain
;
2568 info
->suppress_expansion
= save_suppress
;
2569 info
->static_chain_added
|= save_static_chain_added
;
2572 case GIMPLE_OMP_SECTION
:
2573 case GIMPLE_OMP_STRUCTURED_BLOCK
:
2574 case GIMPLE_OMP_MASTER
:
2575 case GIMPLE_OMP_MASKED
:
2576 case GIMPLE_OMP_ORDERED
:
2577 case GIMPLE_OMP_SCAN
:
2578 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2579 info
, gimple_omp_body_ptr (stmt
));
2583 wi
->val_only
= true;
2585 *handled_ops_p
= false;
2589 if (gimple_clobber_p (stmt
))
2591 tree lhs
= gimple_assign_lhs (stmt
);
2593 && decl_function_context (lhs
) == info
->context
2594 && !use_pointer_in_frame (lhs
)
2595 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2597 gsi_replace (gsi
, gimple_build_nop (), true);
2601 *handled_ops_p
= false;
2605 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2607 var
= DECL_CHAIN (var
))
2608 if (TREE_CODE (var
) == NAMELIST_DECL
)
2610 /* Adjust decls mentioned in NAMELIST_DECL. */
2611 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2615 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2618 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2620 if (decl_function_context (decl
) == info
->context
2621 && !use_pointer_in_frame (decl
))
2623 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2626 CONSTRUCTOR_ELT (decls
, i
)->value
2627 = get_local_debug_decl (info
, decl
, field
);
2633 *handled_ops_p
= false;
2637 /* For every other statement that we are not interested in
2638 handling here, let the walker traverse the operands. */
2639 *handled_ops_p
= false;
2643 /* Indicate that we have handled all the operands ourselves. */
2644 *handled_ops_p
= true;
2649 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2650 that reference labels from outer functions. The rewrite will be a
2651 call to __builtin_nonlocal_goto. */
2654 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2655 struct walk_stmt_info
*wi
)
2657 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2658 tree label
, new_label
, target_context
, x
, field
;
2660 gimple
*stmt
= gsi_stmt (*gsi
);
2662 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2664 *handled_ops_p
= false;
2668 label
= gimple_goto_dest (stmt
);
2669 if (TREE_CODE (label
) != LABEL_DECL
)
2671 *handled_ops_p
= false;
2675 target_context
= decl_function_context (label
);
2676 if (target_context
== info
->context
)
2678 *handled_ops_p
= false;
2682 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2685 /* The original user label may also be use for a normal goto, therefore
2686 we must create a new label that will actually receive the abnormal
2687 control transfer. This new label will be marked LABEL_NONLOCAL; this
2688 mark will trigger proper behavior in the cfg, as well as cause the
2689 (hairy target-specific) non-local goto receiver code to be generated
2690 when we expand rtl. Enter this association into var_map so that we
2691 can insert the new label into the IL during a second pass. */
2692 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2695 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2696 DECL_NONLOCAL (new_label
) = 1;
2697 DECL_CONTEXT (new_label
) = target_context
;
2703 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2704 field
= get_nl_goto_field (i
);
2705 x
= get_frame_field (info
, target_context
, field
, gsi
);
2707 x
= gsi_gimplify_val (info
, x
, gsi
);
2708 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2709 2, build_addr (new_label
), x
);
2710 gsi_replace (gsi
, call
, false);
2712 /* We have handled all of STMT's operands, no need to keep going. */
2713 *handled_ops_p
= true;
2718 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2719 are referenced via nonlocal goto from a nested function. The rewrite
2720 will involve installing a newly generated DECL_NONLOCAL label, and
2721 (potentially) a branch around the rtl gunk that is assumed to be
2722 attached to such a label. */
2725 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2726 struct walk_stmt_info
*wi
)
2728 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2729 tree label
, new_label
;
2730 gimple_stmt_iterator tmp_gsi
;
2731 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2735 *handled_ops_p
= false;
2739 label
= gimple_label_label (stmt
);
2741 tree
*slot
= info
->var_map
->get (label
);
2744 *handled_ops_p
= false;
2748 /* If there's any possibility that the previous statement falls through,
2749 then we must branch around the new non-local label. */
2751 gsi_prev (&tmp_gsi
);
2752 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2754 gimple
*stmt
= gimple_build_goto (label
);
2755 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2758 new_label
= (tree
) *slot
;
2759 stmt
= gimple_build_label (new_label
);
2760 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2762 *handled_ops_p
= true;
2767 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2768 of nested functions that require the use of trampolines. The rewrite
2769 will involve a reference a trampoline generated for the occasion. */
2772 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2774 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2775 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2776 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2781 switch (TREE_CODE (t
))
2785 T.1 = &CHAIN->tramp;
2786 T.2 = __builtin_adjust_trampoline (T.1);
2787 T.3 = (func_type)T.2;
2790 decl
= TREE_OPERAND (t
, 0);
2791 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2794 /* Only need to process nested functions. */
2795 target_context
= decl_function_context (decl
);
2796 if (!target_context
)
2799 /* If the nested function doesn't use a static chain, then
2800 it doesn't need a trampoline. */
2801 if (!DECL_STATIC_CHAIN (decl
))
2804 /* If we don't want a trampoline, then don't build one. */
2805 if (TREE_NO_TRAMPOLINE (t
))
2808 /* Lookup the immediate parent of the callee, as that's where
2809 we need to insert the trampoline. */
2810 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2813 /* Decide whether to generate a descriptor or a trampoline. */
2814 descr
= FUNC_ADDR_BY_DESCRIPTOR (t
) && !flag_trampolines
;
2817 x
= lookup_descr_for_decl (i
, decl
, INSERT
);
2819 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2821 /* Compute the address of the field holding the trampoline. */
2822 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2824 /* APB: We don't need to do the adjustment calls when using off-stack
2825 trampolines, any such adjustment will be done when the off-stack
2826 trampoline is created. */
2827 if (!descr
&& flag_trampoline_impl
== TRAMPOLINE_IMPL_HEAP
)
2828 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2833 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2835 /* Do machine-specific ugliness. Normally this will involve
2836 computing extra alignment, but it can really be anything. */
2838 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR
);
2840 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2841 call
= gimple_build_call (builtin
, 1, x
);
2842 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2845 /* Cast back to the proper function type. */
2846 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2847 x
= init_tmp_var (info
, x
, &wi
->gsi
);
2853 if (!IS_TYPE_OR_DECL_P (t
))
2862 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2863 to addresses of nested functions that require the use of
2864 trampolines. The rewrite will involve a reference a trampoline
2865 generated for the occasion. */
2868 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2869 struct walk_stmt_info
*wi
)
2871 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2872 gimple
*stmt
= gsi_stmt (*gsi
);
2874 switch (gimple_code (stmt
))
2878 /* Only walk call arguments, lest we generate trampolines for
2880 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2881 for (i
= 0; i
< nargs
; i
++)
2882 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2887 case GIMPLE_OMP_TEAMS
:
2888 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2890 *handled_ops_p
= false;
2895 case GIMPLE_OMP_TARGET
:
2896 if (!is_gimple_omp_offloaded (stmt
))
2898 *handled_ops_p
= false;
2902 case GIMPLE_OMP_PARALLEL
:
2903 case GIMPLE_OMP_TASK
:
2906 tree save_local_var_chain
= info
->new_local_var_chain
;
2907 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2908 info
->new_local_var_chain
= NULL
;
2909 char save_static_chain_added
= info
->static_chain_added
;
2910 info
->static_chain_added
= 0;
2911 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2912 info
, gimple_omp_body_ptr (stmt
));
2913 if (info
->new_local_var_chain
)
2914 declare_vars (info
->new_local_var_chain
,
2915 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2917 for (int i
= 0; i
< 2; i
++)
2920 if ((info
->static_chain_added
& (1 << i
)) == 0)
2922 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2923 /* Don't add CHAIN.* or FRAME.* twice. */
2924 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
2925 c
= gimple_omp_target_clauses (stmt
);
2927 c
= gimple_omp_taskreg_clauses (stmt
);
2928 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
2929 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2930 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2931 && OMP_CLAUSE_DECL (c
) == decl
)
2933 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2935 c
= build_omp_clause (gimple_location (stmt
),
2936 i
? OMP_CLAUSE_FIRSTPRIVATE
2937 : OMP_CLAUSE_SHARED
);
2938 OMP_CLAUSE_DECL (c
) = decl
;
2939 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2940 gimple_omp_taskreg_set_clauses (stmt
, c
);
2944 c
= build_omp_clause (gimple_location (stmt
),
2946 OMP_CLAUSE_DECL (c
) = decl
;
2947 OMP_CLAUSE_SET_MAP_KIND (c
,
2948 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2949 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2950 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2951 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2955 info
->new_local_var_chain
= save_local_var_chain
;
2956 info
->static_chain_added
|= save_static_chain_added
;
2961 *handled_ops_p
= false;
2965 *handled_ops_p
= true;
2971 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2972 that reference nested functions to make sure that the static chain
2973 is set up properly for the call. */
2976 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2977 struct walk_stmt_info
*wi
)
2979 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2980 tree decl
, target_context
;
2981 char save_static_chain_added
;
2983 gimple
*stmt
= gsi_stmt (*gsi
);
2985 switch (gimple_code (stmt
))
2988 if (gimple_call_chain (stmt
))
2990 decl
= gimple_call_fndecl (stmt
);
2993 target_context
= decl_function_context (decl
);
2994 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2996 struct nesting_info
*i
= info
;
2997 while (i
&& i
->context
!= target_context
)
2999 /* If none of the outer contexts is the target context, this means
3000 that the function is called in a wrong context. */
3002 internal_error ("%s from %s called in %s",
3003 IDENTIFIER_POINTER (DECL_NAME (decl
)),
3004 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
3005 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
3007 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
3008 get_static_chain (info
, target_context
,
3010 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
3014 case GIMPLE_OMP_TEAMS
:
3015 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3017 walk_body (convert_gimple_call
, NULL
, info
,
3018 gimple_omp_body_ptr (stmt
));
3023 case GIMPLE_OMP_PARALLEL
:
3024 case GIMPLE_OMP_TASK
:
3025 save_static_chain_added
= info
->static_chain_added
;
3026 info
->static_chain_added
= 0;
3027 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
3028 for (i
= 0; i
< 2; i
++)
3031 if ((info
->static_chain_added
& (1 << i
)) == 0)
3033 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
3034 /* Don't add CHAIN.* or FRAME.* twice. */
3035 for (c
= gimple_omp_taskreg_clauses (stmt
);
3037 c
= OMP_CLAUSE_CHAIN (c
))
3038 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
3039 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
3040 && OMP_CLAUSE_DECL (c
) == decl
)
3044 c
= build_omp_clause (gimple_location (stmt
),
3045 i
? OMP_CLAUSE_FIRSTPRIVATE
3046 : OMP_CLAUSE_SHARED
);
3047 OMP_CLAUSE_DECL (c
) = decl
;
3048 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
3049 gimple_omp_taskreg_set_clauses (stmt
, c
);
3052 info
->static_chain_added
|= save_static_chain_added
;
3055 case GIMPLE_OMP_TARGET
:
3056 if (!is_gimple_omp_offloaded (stmt
))
3058 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
3061 save_static_chain_added
= info
->static_chain_added
;
3062 info
->static_chain_added
= 0;
3063 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
3064 for (i
= 0; i
< 2; i
++)
3067 if ((info
->static_chain_added
& (1 << i
)) == 0)
3069 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
3070 /* Don't add CHAIN.* or FRAME.* twice. */
3071 for (c
= gimple_omp_target_clauses (stmt
);
3073 c
= OMP_CLAUSE_CHAIN (c
))
3074 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
3075 && OMP_CLAUSE_DECL (c
) == decl
)
3079 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
3080 OMP_CLAUSE_DECL (c
) = decl
;
3081 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
3082 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
3083 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
3084 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
3088 info
->static_chain_added
|= save_static_chain_added
;
3091 case GIMPLE_OMP_FOR
:
3092 walk_body (convert_gimple_call
, NULL
, info
,
3093 gimple_omp_for_pre_body_ptr (stmt
));
3095 case GIMPLE_OMP_SECTIONS
:
3096 case GIMPLE_OMP_SECTION
:
3097 case GIMPLE_OMP_STRUCTURED_BLOCK
:
3098 case GIMPLE_OMP_SINGLE
:
3099 case GIMPLE_OMP_SCOPE
:
3100 case GIMPLE_OMP_MASTER
:
3101 case GIMPLE_OMP_MASKED
:
3102 case GIMPLE_OMP_TASKGROUP
:
3103 case GIMPLE_OMP_ORDERED
:
3104 case GIMPLE_OMP_SCAN
:
3105 case GIMPLE_OMP_CRITICAL
:
3106 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
3110 /* Keep looking for other operands. */
3111 *handled_ops_p
= false;
3115 *handled_ops_p
= true;
3119 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
3120 call expressions. At the same time, determine if a nested function
3121 actually uses its static chain; if not, remember that. */
3124 convert_all_function_calls (struct nesting_info
*root
)
3126 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
3127 struct nesting_info
*n
;
3129 /* First, optimistically clear static_chain for all decls that haven't
3130 used the static chain already for variable access. But always create
3131 it if not optimizing. This makes it possible to reconstruct the static
3132 nesting tree at run time and thus to resolve up-level references from
3133 within the debugger. */
3134 FOR_EACH_NEST_INFO (n
, root
)
3138 tree decl
= n
->context
;
3142 (void) get_frame_type (n
);
3144 (void) get_chain_decl (n
);
3146 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
3148 DECL_STATIC_CHAIN (decl
) = 0;
3149 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3150 fprintf (dump_file
, "Guessing no static-chain for %s\n",
3151 lang_hooks
.decl_printable_name (decl
, 2));
3154 DECL_STATIC_CHAIN (decl
) = 1;
3155 chain_count
+= DECL_STATIC_CHAIN (decl
);
3158 FOR_EACH_NEST_INFO (n
, root
)
3161 tree decl
= n
->context
;
3162 tree alias
= thunk_info::get (cgraph_node::get (decl
))->alias
;
3163 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
3166 /* Walk the functions and perform transformations. Note that these
3167 transformations can induce new uses of the static chain, which in turn
3168 require re-examining all users of the decl. */
3169 /* ??? It would make sense to try to use the call graph to speed this up,
3170 but the call graph hasn't really been built yet. Even if it did, we
3171 would still need to iterate in this loop since address-of references
3172 wouldn't show up in the callgraph anyway. */
3176 old_chain_count
= chain_count
;
3180 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3181 fputc ('\n', dump_file
);
3183 FOR_EACH_NEST_INFO (n
, root
)
3187 tree decl
= n
->context
;
3188 walk_function (convert_tramp_reference_stmt
,
3189 convert_tramp_reference_op
, n
);
3190 walk_function (convert_gimple_call
, NULL
, n
);
3191 chain_count
+= DECL_STATIC_CHAIN (decl
);
3194 FOR_EACH_NEST_INFO (n
, root
)
3197 tree decl
= n
->context
;
3198 tree alias
= thunk_info::get (cgraph_node::get (decl
))->alias
;
3199 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
3202 while (chain_count
!= old_chain_count
);
3204 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3205 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
3209 struct nesting_copy_body_data
3212 struct nesting_info
*root
;
3215 /* A helper subroutine for debug_var_chain type remapping. */
3218 nesting_copy_decl (tree decl
, copy_body_data
*id
)
3220 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
3221 tree
*slot
= nid
->root
->var_map
->get (decl
);
3224 return (tree
) *slot
;
3226 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
3228 tree new_decl
= copy_decl_no_change (decl
, id
);
3229 DECL_ORIGINAL_TYPE (new_decl
)
3230 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
3235 || TREE_CODE (decl
) == PARM_DECL
3236 || TREE_CODE (decl
) == RESULT_DECL
)
3239 return copy_decl_no_change (decl
, id
);
3242 /* A helper function for remap_vla_decls. See if *TP contains
3243 some remapped variables. */
3246 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
3248 struct nesting_info
*root
= (struct nesting_info
*) data
;
3254 tree
*slot
= root
->var_map
->get (t
);
3262 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3266 remap_vla_decls (tree block
, struct nesting_info
*root
)
3268 tree var
, subblock
, val
, type
;
3269 struct nesting_copy_body_data id
;
3271 for (subblock
= BLOCK_SUBBLOCKS (block
);
3273 subblock
= BLOCK_CHAIN (subblock
))
3274 remap_vla_decls (subblock
, root
);
3276 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3277 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3279 val
= DECL_VALUE_EXPR (var
);
3280 type
= TREE_TYPE (var
);
3282 if (! (INDIRECT_REF_P (val
)
3283 && VAR_P (TREE_OPERAND (val
, 0))
3284 && variably_modified_type_p (type
, NULL
)))
3287 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
3288 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3292 if (var
== NULL_TREE
)
3295 memset (&id
, 0, sizeof (id
));
3296 id
.cb
.copy_decl
= nesting_copy_decl
;
3297 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3300 for (; var
; var
= DECL_CHAIN (var
))
3301 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3303 struct nesting_info
*i
;
3306 val
= DECL_VALUE_EXPR (var
);
3307 type
= TREE_TYPE (var
);
3309 if (! (INDIRECT_REF_P (val
)
3310 && VAR_P (TREE_OPERAND (val
, 0))
3311 && variably_modified_type_p (type
, NULL
)))
3314 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
3315 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3318 context
= decl_function_context (var
);
3319 for (i
= root
; i
; i
= i
->outer
)
3320 if (i
->context
== context
)
3326 /* Fully expand value expressions. This avoids having debug variables
3327 only referenced from them and that can be swept during GC. */
3330 tree t
= (tree
) *slot
;
3331 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
3332 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
3335 id
.cb
.src_fn
= i
->context
;
3336 id
.cb
.dst_fn
= i
->context
;
3337 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3339 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
3340 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3342 newt
= TREE_TYPE (newt
);
3343 type
= TREE_TYPE (type
);
3345 if (TYPE_NAME (newt
)
3346 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3347 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3349 && TYPE_NAME (newt
) == TYPE_NAME (type
))
3350 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3352 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
3353 if (val
!= DECL_VALUE_EXPR (var
))
3354 SET_DECL_VALUE_EXPR (var
, val
);
3357 delete id
.cb
.decl_map
;
3360 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3364 fixup_vla_decls (tree block
)
3366 for (tree var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3367 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3369 tree val
= DECL_VALUE_EXPR (var
);
3371 if (! (INDIRECT_REF_P (val
)
3372 && VAR_P (TREE_OPERAND (val
, 0))
3373 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val
, 0))))
3376 /* Fully expand value expressions. This avoids having debug variables
3377 only referenced from them and that can be swept during GC. */
3378 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
),
3379 DECL_VALUE_EXPR (TREE_OPERAND (val
, 0)));
3380 SET_DECL_VALUE_EXPR (var
, val
);
3383 for (tree sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= BLOCK_CHAIN (sub
))
3384 fixup_vla_decls (sub
);
3387 /* Fold the MEM_REF *E. */
3389 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
3391 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
3392 *ref_p
= fold (*ref_p
);
3396 /* Given DECL, a nested function, build an initialization call for FIELD,
3397 the trampoline or descriptor for DECL, using FUNC as the function. */
3400 build_init_call_stmt (struct nesting_info
*info
, tree decl
, tree field
,
3403 tree arg1
, arg2
, arg3
, x
;
3405 gcc_assert (DECL_STATIC_CHAIN (decl
));
3406 arg3
= build_addr (info
->frame_decl
);
3408 arg2
= build_addr (decl
);
3410 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3411 info
->frame_decl
, field
, NULL_TREE
);
3412 arg1
= build_addr (x
);
3414 return gimple_build_call (func
, 3, arg1
, arg2
, arg3
);
/* Do "everything else" to clean up or complete state collected by the various
   walking passes -- create a field to hold the frame base address, lay out the
   types and decls, generate code to initialize the frame decl, store critical
   expressions in the struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq cleanup_list = NULL;
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks carry no frame of their own; nothing to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
	= builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Either store the parameter's address in the frame (when it
	     lives in the frame by reference) or copy its value in.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
	    {
	      /* We pass a whole bunch of arguments to the builtin function that
		 creates the off-stack trampoline, these are
		 1. The nested function chain value (that must be passed to the
		 nested function so it can find the function arguments).
		 2. A pointer to the nested function implementation,
		 3. The address in the local stack frame where we should write
		 the address of the trampoline.

		 When this code was originally written I just kind of threw
		 everything at the builtin, figuring I'd work out what was
		 actually needed later, I think, the stack pointer could
		 certainly be dropped, arguments #2 and #4 are based off the
		 stack pointer anyway, so #1 doesn't seem to add much value.  */
	      tree arg1, arg2, arg3;

	      gcc_assert (DECL_STATIC_CHAIN (i->context));
	      arg1 = build_addr (root->frame_decl);
	      arg2 = build_addr (i->context);

	      x = build3 (COMPONENT_REF, TREE_TYPE (field),
			  root->frame_decl, field, NULL_TREE);
	      arg3 = build_addr (x);

	      x = builtin_decl_explicit (BUILT_IN_GCC_NESTED_PTR_CREATED);
	      stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	      gimple_seq_add_stmt (&stmt_list, stmt);

	      /* This call to delete the nested function trampoline is added to
		 the cleanup list, and called when we exit the current scope.  */
	      x = builtin_decl_explicit (BUILT_IN_GCC_NESTED_PTR_DELETED);
	      stmt = gimple_build_call (x, 0);
	      gimple_seq_add_stmt (&cleanup_list, stmt);
	    }
	  else
	    {
	      /* Original code to initialise the on stack trampoline.  */
	      x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	      stmt = build_init_call_stmt (root, i->context, field, x);
	      gimple_seq_add_stmt (&stmt_list, stmt);
	    }
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
	{
	  /* Handle off-stack trampolines.  */
	  gbind *bind;
	  annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
	  annotate_all_with_location (cleanup_list, DECL_SOURCE_LOCATION (context));
	  bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
	  gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));

	  gimple_seq xxx_list = NULL;

	  if (cleanup_list != NULL)
	    {
	      /* Maybe we shouldn't be creating this try/finally if -fno-exceptions is
		 in use.  If this is the case, then maybe we should, instead, be
		 inserting the cleanup code onto every path out of this function?  Not
		 yet figured out how we would do this.  */
	      gtry *t = gimple_build_try (stmt_list, cleanup_list,
					  GIMPLE_TRY_FINALLY);
	      gimple_seq_add_stmt (&xxx_list, t);
	    }
	  else
	    xxx_list = stmt_list;

	  gimple_bind_set_body (bind, xxx_list);
	}
      else
	{
	  /* The traditional, on stack trampolines.  */
	  gbind *bind;
	  annotate_all_with_location (stmt_list,
				      DECL_SOURCE_LOCATION (context));
	  bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
	  gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
	  gimple_bind_set_body (bind, stmt_list);
	}
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Look for a debug decl whose type is variably modified; only then
	 is the type-remapping walk below needed at all.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost enclosing function whose context the
		   type actually varies with; skip if there is none.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Walk through unnamed pointer layers in parallel on the
		   remapped and original types to reach the named type.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3761 finalize_nesting_tree (struct nesting_info
*root
)
3763 struct nesting_info
*n
;
3764 FOR_EACH_NEST_INFO (n
, root
)
3765 finalize_nesting_tree_1 (n
);
/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_node::get (root->context);

  /* For nested functions update the cgraph to reflect unnesting.
     We also delay finalizing of these functions up to this point.  */
  if (nested_function_info::get (node)->origin)
    {
      unnest_function (node);
      if (!root->thunk_p)
	cgraph_node::finalize_function (root->context, true);
    }
}
3786 unnest_nesting_tree (struct nesting_info
*root
)
3788 struct nesting_info
*n
;
3789 FOR_EACH_NEST_INFO (n
, root
)
3790 unnest_nesting_tree_1 (n
);
/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *node, *next;

  node = iter_nestinfo_start (root);
  do
    {
      /* Fetch the successor before tearing NODE down, since NODE is
	 freed below and cannot be dereferenced afterwards.  */
      next = iter_nestinfo_next (node);
      delete node->var_map;
      delete node->field_map;
      delete node->mem_refs;
      free (node);
      node = next;
    }
  while (node);
}
3813 /* Gimplify a function and all its nested functions. */
3815 gimplify_all_functions (struct cgraph_node
*root
)
3817 struct cgraph_node
*iter
;
3818 if (!gimple_body (root
->decl
))
3819 gimplify_function_tree (root
->decl
);
3820 for (iter
= first_nested_function (root
); iter
;
3821 iter
= next_nested_function (iter
))
3823 gimplify_all_functions (iter
);
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!first_nested_function (cgn))
    return;

  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite uses of outer-function variables, then local variables that
     were moved into the frame, then non-local gotos; the walk order
     matters, since each phase builds on the previous one's rewrites.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3873 #include "gt-tree-nested.h"