/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
         decomposition.

     (2) Scan the function looking for the ways these variables are used.
         In particular we're interested in the number of times a variable
         (or member) is needed as a complete unit, and the number of times
         a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.
*/
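/* As an illustration only (not taken from this file), a function such as

       struct pt { int x; int y; };
       int f (int a, int b)
       {
         struct pt p;
         p.x = a;
         p.y = b;
         return p.x + p.y;
       }

   may have P replaced by two scalar temporaries, here called p$x and p$y
   following the naming scheme of build_element_name below:

       p$x = a;
       p$y = b;
       return p$x + p$y;

   after which P itself is dead and can be removed.  The exact outcome
   depends on the usage counts collected in phase 2.  */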
/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate
   and each (accessed) member of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of a
     complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;
/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}
/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
          || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
          || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
          || code == REFERENCE_TYPE);
}
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        bool saw_one_field = false;

        for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
          if (TREE_CODE (t) == FIELD_DECL)
            {
              /* Reject incorrectly represented bit fields.  */
              if (DECL_BIT_FIELD (t)
                  && (tree_low_cst (DECL_SIZE (t), 1)
                      != TYPE_PRECISION (TREE_TYPE (t))))
                goto fail;

              saw_one_field = true;
            }

        /* Record types must have at least one field.  */
        if (!saw_one_field)
          goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
        goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
        goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
        goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
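/* For illustration: under the rules above a type such as
   "struct { int i; double d; }" can be decomposed, while a union, a
   zero-sized or variable-sized type, an array without constant bounds,
   or a record whose bit fields are mis-sized cannot.  */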
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it must live in memory\n");
        }
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it is declared volatile\n");
        }
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because its type cannot be decomposed\n");
        }
      return false;
    }

  return true;
}
/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
        tree f;

        if (bitmap_bit_p (sra_type_inst_cache, cache+0))
          return true;
        if (bitmap_bit_p (sra_type_inst_cache, cache+1))
          return false;

        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
                {
                  bitmap_set_bit (sra_type_inst_cache, cache+1);
                  return false;
                }
            }

        bitmap_set_bit (sra_type_inst_cache, cache+0);
        return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c ; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
         lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}
/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}
/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
         compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
        return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
                enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  dummy.parent = parent;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
        {
          elt->sibling = parent->children;
          parent->children = elt;
        }

      /* If this is a parameter, then if we want to scalarize, we have
         one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
        {
          elt->n_copies = 1;
          bitmap_set_bit (needs_copy_in, DECL_UID (child));
        }
    }

  return elt;
}
/* Return true if the ARRAY_REF in EXPR is a constant, in bounds access.  */

static bool
is_valid_const_index (tree expr)
{
  tree dom, t, index = TREE_OPERAND (expr, 1);

  if (TREE_CODE (index) != INTEGER_CST)
    return false;

  /* Watch out for stupid user tricks, indexing outside the array.

     Careful, we're not called only on scalarizable types, so do not
     assume constant array bounds.  We needn't do anything with such
     cases, since they'll be referring to objects that we should have
     already rejected for scalarization, so returning false is fine.  */

  dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
  if (dom == NULL)
    return false;

  t = TYPE_MIN_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (index, t))
    return false;

  t = TYPE_MAX_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (t, index))
    return false;

  return true;
}
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
        return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (is_valid_const_index (expr))
        child = TREE_OPERAND (expr, 1);
      else
        return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
        return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;

    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);

  return NULL;
}
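/* As an example of the recursion above, for an access such as x.f[3]
   (an ARRAY_REF whose operand 0 is a COMPONENT_REF of the candidate
   decl X), the walk first creates the root element for X, then a child
   keyed by the FIELD_DECL F, then a grandchild keyed by the constant
   index 3; the sra_elt returned describes that innermost reference.  */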
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
               block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
                block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
                block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
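/* The two instantiations of this callback table appear later in the file:
   scan_function (phase 2) fills it with scan_use, scan_copy, scan_init and
   scan_ldst with the final flag true, while scalarize_function (phase 4)
   uses scalarize_use, scalarize_copy, scalarize_init and scalarize_ldst
   with the flag false.  */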
#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
                         void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
        return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
               const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */
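  /* For instance, for an original reference x.a[3].b where X is a candidate,
     INNER descends from the outermost COMPONENT_REF through the ARRAY_REF
     down to the decl X while EXPR keeps naming the full reference; had the
     index not been constant, EXPR would be reset at the ARRAY_REF so that
     only the larger subexpression is treated as used.  */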
  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        /* If there is a scalarizable decl at the bottom, then process it.  */
        if (is_sra_candidate_decl (inner))
          {
            struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
            if (disable_scalarization)
              elt->cannot_scalarize = true;
            else
              fns->use (elt, expr_p, bsi, is_output, use_all_p);
          }
        return;

      case ARRAY_REF:
        /* Non-constant index means any member may be accessed.  Prevent the
           expression from being scalarized.  If we were to treat this as a
           reference to the whole array, we can wind up with a single dynamic
           index reference inside a loop being overridden by several constant
           index references during loop setup.  It's possible that this could
           be avoided by using dynamic usage counts based on BB trip counts
           (based on loop analysis or profiling), but that hardly seems worth
           the effort.  */
        /* ??? Hack.  Figure out how to push this into the scan routines
           without duplicating too much code.  */
        if (!is_valid_const_index (inner))
          {
            disable_scalarization = true;
            goto use_all;
          }

        /* ??? Are we assured that non-constant bounds and stride will have
           the same value everywhere?  I don't think Fortran will...  */
        if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
          goto use_all;

        inner = TREE_OPERAND (inner, 0);
        break;

      case COMPONENT_REF:
        /* A reference to a union member constitutes a reference to the
           entire union.  */
        if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
          goto use_all;
        /* ??? See above re non-constant stride.  */
        if (TREE_OPERAND (inner, 2))
          goto use_all;

        inner = TREE_OPERAND (inner, 0);
        break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
        inner = TREE_OPERAND (inner, 0);
        break;

      case BIT_FIELD_REF:
        /* A bit field reference (access to *multiple* fields simultaneously)
           is not currently scalarized.  Consider this an access to the
           complete outer element, to which walk_tree will bring us next.  */
        goto use_all;

      case ARRAY_RANGE_REF:
        /* Similarly, a subrange reference is used to modify indexing.  Which
           means that the canonical element names that we have won't work.  */
        goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
        /* Similarly, a view/nop explicitly wants to look at an object in a
           type other than the one we've scalarized.  */
        goto use_all;

      case WITH_SIZE_EXPR:
        /* This is a transparent wrapper.  The entire inner expression really
           is being used.  */
        goto use_all;

      use_all:
        expr_p = &TREE_OPERAND (inner, 0);
        inner = expr = *expr_p;
        use_all_p = true;
        break;

      default:
#ifdef ENABLE_CHECKING
        /* Validate that we're not missing any references.  */
        gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
        return;
      }
}
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
                    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
                    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}
/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
                   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
795 /* Walk a MODIFY_EXPR and categorize the assignment appropriately. */
798 sra_walk_modify_expr (tree expr
, block_stmt_iterator
*bsi
,
799 const struct sra_walk_fns
*fns
)
801 struct sra_elt
*lhs_elt
, *rhs_elt
;
804 lhs
= TREE_OPERAND (expr
, 0);
805 rhs
= TREE_OPERAND (expr
, 1);
806 lhs_elt
= maybe_lookup_element_for_expr (lhs
);
807 rhs_elt
= maybe_lookup_element_for_expr (rhs
);
809 /* If both sides are scalarizable, this is a COPY operation. */
810 if (lhs_elt
&& rhs_elt
)
812 fns
->copy (lhs_elt
, rhs_elt
, bsi
);
816 /* If the RHS is scalarizable, handle it. There are only two cases. */
819 if (!rhs_elt
->is_scalar
)
820 fns
->ldst (rhs_elt
, lhs
, bsi
, false);
822 fns
->use (rhs_elt
, &TREE_OPERAND (expr
, 1), bsi
, false, false);
825 /* If it isn't scalarizable, there may be scalarizable variables within, so
826 check for a call or else walk the RHS to see if we need to do any
827 copy-in operations. We need to do it before the LHS is scalarized so
828 that the statements get inserted in the proper place, before any
829 copy-out operations. */
832 tree call
= get_call_expr_in (rhs
);
834 sra_walk_call_expr (call
, bsi
, fns
);
836 sra_walk_expr (&TREE_OPERAND (expr
, 1), bsi
, false, fns
);
839 /* Likewise, handle the LHS being scalarizable. We have cases similar
840 to those above, but also want to handle RHS being constant. */
843 /* If this is an assignment from a constant, or constructor, then
844 we have access to all of the elements individually. Invoke INIT. */
845 if (TREE_CODE (rhs
) == COMPLEX_EXPR
846 || TREE_CODE (rhs
) == COMPLEX_CST
847 || TREE_CODE (rhs
) == CONSTRUCTOR
)
848 fns
->init (lhs_elt
, rhs
, bsi
);
850 /* If this is an assignment from read-only memory, treat this as if
851 we'd been passed the constructor directly. Invoke INIT. */
852 else if (TREE_CODE (rhs
) == VAR_DECL
854 && TREE_READONLY (rhs
)
855 && targetm
.binds_local_p (rhs
))
856 fns
->init (lhs_elt
, DECL_INITIAL (rhs
), bsi
);
858 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
859 The lvalue requirement prevents us from trying to directly scalarize
860 the result of a function call. Which would result in trying to call
861 the function multiple times, and other evil things. */
862 else if (!lhs_elt
->is_scalar
&& is_gimple_addressable (rhs
))
863 fns
->ldst (lhs_elt
, rhs
, bsi
, true);
865 /* Otherwise we're being used in some context that requires the
866 aggregate to be seen as a whole. Invoke USE. */
868 fns
->use (lhs_elt
, &TREE_OPERAND (expr
, 0), bsi
, true, false);
871 /* Similarly to above, LHS_ELT being null only means that the LHS as a
872 whole is not a scalarizable reference. There may be occurrences of
873 scalarizable variables within, which implies a USE. */
875 sra_walk_expr (&TREE_OPERAND (expr
, 0), bsi
, true, fns
);
878 /* Entry point to the walk functions. Search the entire function,
879 invoking the callbacks in FNS on each of the references to
880 scalarizable variables. */
883 sra_walk_function (const struct sra_walk_fns
*fns
)
886 block_stmt_iterator si
, ni
;
888 /* ??? Phase 4 could derive some benefit to walking the function in
889 dominator tree order. */
892 for (si
= bsi_start (bb
); !bsi_end_p (si
); si
= ni
)
897 stmt
= bsi_stmt (si
);
898 ann
= stmt_ann (stmt
);
903 /* If the statement has no virtual operands, then it doesn't
904 make any structure references that we care about. */
905 if (ZERO_SSA_OPERANDS (stmt
, (SSA_OP_VIRTUAL_DEFS
| SSA_OP_VUSE
)))
908 switch (TREE_CODE (stmt
))
911 /* If we have "return <retval>" then the return value is
912 already exposed for our pleasure. Walk it as a USE to
913 force all the components back in place for the return.
915 If we have an embedded assignment, then <retval> is of
916 a type that gets returned in registers in this ABI, and
917 we do not wish to extend their lifetimes. Treat this
918 as a USE of the variable on the RHS of this assignment. */
920 t
= TREE_OPERAND (stmt
, 0);
921 if (TREE_CODE (t
) == MODIFY_EXPR
)
922 sra_walk_expr (&TREE_OPERAND (t
, 1), &si
, false, fns
);
924 sra_walk_expr (&TREE_OPERAND (stmt
, 0), &si
, false, fns
);
928 sra_walk_modify_expr (stmt
, &si
, fns
);
931 sra_walk_call_expr (stmt
, &si
, fns
);
934 sra_walk_asm_expr (stmt
, &si
, fns
);
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
        {
          bitmap_set_bit (sra_candidates, DECL_UID (var));
          any_set = true;
        }
    }

  return any_set;
}


/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
          block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
          bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
           bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);
}
/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;
      bitmap_iterator bi;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
        {
          tree var = referenced_var (i);
          struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
          if (elt)
            scan_dump (elt);
        }
      fputc ('\n', dump_file);
    }
}
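/* In a detailed tree dump the output of scan_dump above is one line per
   element, e.g. "a.b: n_uses=2 n_copies=1" (hypothetical numbers), with
   the root variable followed by its children; this is the usage profile
   that the phase-3 heuristics below operate on.  */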
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
        {
          if (elt->element == integer_zero_node)
            obstack_grow (&sra_obstack, "real", 4);
          else
            obstack_grow (&sra_obstack, "imag", 4);
          return;
        }
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
        obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
                      IDENTIFIER_LENGTH (name));
      else
        {
          sprintf (buffer, "D%u", DECL_UID (t));
          obstack_grow (&sra_obstack, buffer, strlen (buffer));
        }
    }
}
/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
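/* For example, the field B of a candidate A yields the replacement name
   "a$b"; element 3 of an array member yields "a$b$3", and the imaginary
   part of a complex member yields "a$b$imag".  Elements without a
   source-level name fall back to the "D<uid>" form produced above.  */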
1106 /* Instantiate an element as an independent variable. */
1109 instantiate_element (struct sra_elt
*elt
)
1111 struct sra_elt
*base_elt
;
1114 for (base_elt
= elt
; base_elt
->parent
; base_elt
= base_elt
->parent
)
1116 base
= base_elt
->element
;
1118 elt
->replacement
= var
= make_rename_temp (elt
->type
, "SR");
1119 DECL_SOURCE_LOCATION (var
) = DECL_SOURCE_LOCATION (base
);
1120 DECL_ARTIFICIAL (var
) = 1;
1122 if (TREE_THIS_VOLATILE (elt
->type
))
1124 TREE_THIS_VOLATILE (var
) = 1;
1125 TREE_SIDE_EFFECTS (var
) = 1;
1128 if (DECL_NAME (base
) && !DECL_IGNORED_P (base
))
1130 char *pretty_name
= build_element_name (elt
);
1131 DECL_NAME (var
) = get_identifier (pretty_name
);
1132 obstack_free (&sra_obstack
, pretty_name
);
1134 SET_DECL_DEBUG_EXPR (var
, generate_element_ref (elt
));
1135 DECL_DEBUG_EXPR_IS_FROM (var
) = 1;
1137 DECL_IGNORED_P (var
) = 0;
1138 TREE_NO_WARNING (var
) = TREE_NO_WARNING (base
);
1142 DECL_IGNORED_P (var
) = 1;
1143 /* ??? We can't generate any warning that would be meaningful. */
1144 TREE_NO_WARNING (var
) = 1;
1149 fputs (" ", dump_file
);
1150 dump_sra_elt_name (dump_file
, elt
);
1151 fputs (" -> ", dump_file
);
1152 print_generic_expr (dump_file
, var
, dump_flags
);
1153 fputc ('\n', dump_file
);
1157 /* Make one pass across an element tree deciding whether or not it's
1158 profitable to instantiate individual leaf scalars.
1160 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1161 fields all the way up the tree. */
1164 decide_instantiation_1 (struct sra_elt
*elt
, unsigned int parent_uses
,
1165 unsigned int parent_copies
)
1167 if (dump_file
&& !elt
->parent
)
1169 fputs ("Initial instantiation for ", dump_file
);
1170 dump_sra_elt_name (dump_file
, elt
);
1171 fputc ('\n', dump_file
);
1174 if (elt
->cannot_scalarize
)
      /* The decision is simple: instantiate if we're used more frequently
         than the parent needs to be seen as a complete unit.  */
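      /* A worked example of the test below: if a field's own uses and
         copies plus the copies of its enclosing aggregates total 3 while
         the enclosing aggregates are needed whole only once
         (parent_uses == 1), then 3 > 1 and the field gets its own
         replacement variable; were the whole structure used more often
         than that, it would not.  */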
1181 if (elt
->n_uses
+ elt
->n_copies
+ parent_copies
> parent_uses
)
1182 instantiate_element (elt
);
1187 unsigned int this_uses
= elt
->n_uses
+ parent_uses
;
1188 unsigned int this_copies
= elt
->n_copies
+ parent_copies
;
1190 for (c
= elt
->children
; c
; c
= c
->sibling
)
1191 decide_instantiation_1 (c
, this_uses
, this_copies
);
1195 /* Compute the size and number of all instantiated elements below ELT.
1196 We will only care about this if the size of the complete structure
1197 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1200 sum_instantiated_sizes (struct sra_elt
*elt
, unsigned HOST_WIDE_INT
*sizep
)
1202 if (elt
->replacement
)
1204 *sizep
+= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt
->type
));
1210 unsigned int count
= 0;
1212 for (c
= elt
->children
; c
; c
= c
->sibling
)
1213 count
+= sum_instantiated_sizes (c
, sizep
);
1219 /* Instantiate fields in ELT->TYPE that are not currently present as
1222 static void instantiate_missing_elements (struct sra_elt
*elt
);
1225 instantiate_missing_elements_1 (struct sra_elt
*elt
, tree child
, tree type
)
1227 struct sra_elt
*sub
= lookup_element (elt
, child
, type
, INSERT
);
1230 if (sub
->replacement
== NULL
)
1231 instantiate_element (sub
);
1234 instantiate_missing_elements (sub
);
1238 instantiate_missing_elements (struct sra_elt
*elt
)
1240 tree type
= elt
->type
;
1242 switch (TREE_CODE (type
))
1247 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
1248 if (TREE_CODE (f
) == FIELD_DECL
)
1250 tree field_type
= TREE_TYPE (f
);
1252 /* canonicalize_component_ref() unwidens some bit-field
1253 types (not marked as DECL_BIT_FIELD in C++), so we
1254 must do the same, lest we may introduce type
1256 if (INTEGRAL_TYPE_P (field_type
)
1257 && DECL_MODE (f
) != TYPE_MODE (field_type
))
1258 field_type
= TREE_TYPE (get_unwidened (build3 (COMPONENT_REF
,
1264 instantiate_missing_elements_1 (elt
, f
, field_type
);
1271 tree i
, max
, subtype
;
1273 i
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
1274 max
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1275 subtype
= TREE_TYPE (type
);
1279 instantiate_missing_elements_1 (elt
, i
, subtype
);
1280 if (tree_int_cst_equal (i
, max
))
1282 i
= int_const_binop (PLUS_EXPR
, i
, integer_one_node
, true);
1289 type
= TREE_TYPE (type
);
1290 instantiate_missing_elements_1 (elt
, integer_zero_node
, type
);
1291 instantiate_missing_elements_1 (elt
, integer_one_node
, type
);
1299 /* Make one pass across an element tree deciding whether to perform block
1300 or element copies. If we decide on element copies, instantiate all
1301 elements. Return true if there are any instantiated sub-elements. */
1304 decide_block_copy (struct sra_elt
*elt
)
1309 /* If scalarization is disabled, respect it. */
1310 if (elt
->cannot_scalarize
)
1312 elt
->use_block_copy
= 1;
1316 fputs ("Scalarization disabled for ", dump_file
);
1317 dump_sra_elt_name (dump_file
, elt
);
1318 fputc ('\n', dump_file
);
1321 /* Disable scalarization of sub-elements */
1322 for (c
= elt
->children
; c
; c
= c
->sibling
)
1324 c
->cannot_scalarize
= 1;
1325 decide_block_copy (c
);
1330 /* Don't decide if we've no uses. */
1331 if (elt
->n_uses
== 0 && elt
->n_copies
== 0)
1334 else if (!elt
->is_scalar
)
1336 tree size_tree
= TYPE_SIZE_UNIT (elt
->type
);
1337 bool use_block_copy
= true;
1339 /* Tradeoffs for COMPLEX types pretty much always make it better
1340 to go ahead and split the components. */
1341 if (TREE_CODE (elt
->type
) == COMPLEX_TYPE
)
1342 use_block_copy
= false;
1344 /* Don't bother trying to figure out the rest if the structure is
1345 so large we can't do easy arithmetic. This also forces block
1346 copies for variable sized structures. */
1347 else if (host_integerp (size_tree
, 1))
1349 unsigned HOST_WIDE_INT full_size
, inst_size
= 0;
1350 unsigned int max_size
, max_count
, inst_count
, full_count
;
1352 /* If the sra-max-structure-size parameter is 0, then the
1353 user has not overridden the parameter and we can choose a
1354 sensible default. */
1355 max_size
= SRA_MAX_STRUCTURE_SIZE
1356 ? SRA_MAX_STRUCTURE_SIZE
1357 : MOVE_RATIO
* UNITS_PER_WORD
;
1358 max_count
= SRA_MAX_STRUCTURE_COUNT
1359 ? SRA_MAX_STRUCTURE_COUNT
1362 full_size
= tree_low_cst (size_tree
, 1);
1363 full_count
= count_type_elements (elt
->type
, false);
1364 inst_count
= sum_instantiated_sizes (elt
, &inst_size
);
1366 /* ??? What to do here. If there are two fields, and we've only
1367 instantiated one, then instantiating the other is clearly a win.
1368 If there are a large number of fields then the size of the copy
1369 is much more of a factor. */
1371 /* If the structure is small, and we've made copies, go ahead
1372 and instantiate, hoping that the copies will go away. */
1373 if (full_size
<= max_size
1374 && (full_count
- inst_count
) <= max_count
1375 && elt
->n_copies
> elt
->n_uses
)
1376 use_block_copy
= false;
          else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
                   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
            use_block_copy = false;
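          /* For instance, with a ratio parameter of 75, a four-field
             structure of which three fields (and at least 75% of the
             bytes) already have replacement variables satisfies
             3*100 >= 4*75, so the copy is performed element by element
             rather than as a block.  */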
1381 /* In order to avoid block copy, we have to be able to instantiate
1382 all elements of the type. See if this is possible. */
1384 && (!can_completely_scalarize_p (elt
)
1385 || !type_can_instantiate_all_elements (elt
->type
)))
1386 use_block_copy
= true;
1388 elt
->use_block_copy
= use_block_copy
;
1392 fprintf (dump_file
, "Using %s for ",
1393 use_block_copy
? "block-copy" : "element-copy");
1394 dump_sra_elt_name (dump_file
, elt
);
1395 fputc ('\n', dump_file
);
1398 if (!use_block_copy
)
1400 instantiate_missing_elements (elt
);
1405 any_inst
= elt
->replacement
!= NULL
;
1407 for (c
= elt
->children
; c
; c
= c
->sibling
)
1408 any_inst
|= decide_block_copy (c
);
1413 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1416 decide_instantiations (void)
1420 bitmap_head done_head
;
1423 /* We cannot clear bits from a bitmap we're iterating over,
1424 so save up all the bits to clear until the end. */
1425 bitmap_initialize (&done_head
, &bitmap_default_obstack
);
1426 cleared_any
= false;
1428 EXECUTE_IF_SET_IN_BITMAP (sra_candidates
, 0, i
, bi
)
1430 tree var
= referenced_var (i
);
1431 struct sra_elt
*elt
= lookup_element (NULL
, var
, NULL
, NO_INSERT
);
1434 decide_instantiation_1 (elt
, 0, 0);
1435 if (!decide_block_copy (elt
))
1440 bitmap_set_bit (&done_head
, i
);
1447 bitmap_and_compl_into (sra_candidates
, &done_head
);
1448 bitmap_and_compl_into (needs_copy_in
, &done_head
);
1450 bitmap_clear (&done_head
);
1452 mark_set_for_renaming (sra_candidates
);
1455 fputc ('\n', dump_file
);
1459 /* Phase Four: Update the function to match the replacements created. */
1461 /* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
1462 renaming. This becomes necessary when we modify all of a non-scalar. */
1465 mark_all_v_defs_1 (tree stmt
)
1470 update_stmt_if_modified (stmt
);
1472 FOR_EACH_SSA_TREE_OPERAND (sym
, stmt
, iter
, SSA_OP_ALL_VIRTUALS
)
1474 if (TREE_CODE (sym
) == SSA_NAME
)
1475 sym
= SSA_NAME_VAR (sym
);
1476 mark_sym_for_renaming (sym
);
1481 /* Mark all the variables in virtual operands in all the statements in
1482 LIST for renaming. */
1485 mark_all_v_defs (tree list
)
1487 if (TREE_CODE (list
) != STATEMENT_LIST
)
1488 mark_all_v_defs_1 (list
);
1491 tree_stmt_iterator i
;
1492 for (i
= tsi_start (list
); !tsi_end_p (i
); tsi_next (&i
))
1493 mark_all_v_defs_1 (tsi_stmt (i
));
1497 /* Mark every replacement under ELT with TREE_NO_WARNING. */
1500 mark_no_warning (struct sra_elt
*elt
)
1502 if (!elt
->all_no_warning
)
1504 if (elt
->replacement
)
1505 TREE_NO_WARNING (elt
->replacement
) = 1;
1509 for (c
= elt
->children
; c
; c
= c
->sibling
)
1510 mark_no_warning (c
);
1515 /* Build a single level component reference to ELT rooted at BASE. */
1518 generate_one_element_ref (struct sra_elt
*elt
, tree base
)
1520 switch (TREE_CODE (TREE_TYPE (base
)))
1524 tree field
= elt
->element
;
1526 /* Watch out for compatible records with differing field lists. */
1527 if (DECL_FIELD_CONTEXT (field
) != TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
1528 field
= find_compatible_field (TREE_TYPE (base
), field
);
1530 return build (COMPONENT_REF
, elt
->type
, base
, field
, NULL
);
1534 return build (ARRAY_REF
, elt
->type
, base
, elt
->element
, NULL
, NULL
);
1537 if (elt
->element
== integer_zero_node
)
1538 return build (REALPART_EXPR
, elt
->type
, base
);
1540 return build (IMAGPART_EXPR
, elt
->type
, base
);
1547 /* Build a full component reference to ELT rooted at its native variable. */
1550 generate_element_ref (struct sra_elt
*elt
)
1553 return generate_one_element_ref (elt
, generate_element_ref (elt
->parent
));
1555 return elt
->element
;
1559 sra_build_assignment (tree dst
, tree src
)
1561 /* We need TYPE_CANONICAL to compare the types of dst and src
1562 efficiently, but that's only introduced in GCC 4.3. */
1563 return build (MODIFY_EXPR
, void_type_node
, dst
, src
);
1566 /* Generate a set of assignment statements in *LIST_P to copy all
1567 instantiated elements under ELT to or from the equivalent structure
1568 rooted at EXPR. COPY_OUT controls the direction of the copy, with
1569 true meaning to copy out of EXPR into ELT. */
1572 generate_copy_inout (struct sra_elt
*elt
, bool copy_out
, tree expr
,
1578 if (!copy_out
&& TREE_CODE (expr
) == SSA_NAME
1579 && TREE_CODE (TREE_TYPE (expr
)) == COMPLEX_TYPE
)
1583 c
= lookup_element (elt
, integer_zero_node
, NULL
, NO_INSERT
);
1585 c
= lookup_element (elt
, integer_one_node
, NULL
, NO_INSERT
);
1588 t
= build (COMPLEX_EXPR
, elt
->type
, r
, i
);
1589 t
= sra_build_assignment (expr
, t
);
1590 SSA_NAME_DEF_STMT (expr
) = t
;
1591 append_to_statement_list (t
, list_p
);
1593 else if (elt
->replacement
)
1596 t
= sra_build_assignment (elt
->replacement
, expr
);
1598 t
= sra_build_assignment (expr
, elt
->replacement
);
1599 append_to_statement_list (t
, list_p
);
1603 for (c
= elt
->children
; c
; c
= c
->sibling
)
1605 t
= generate_one_element_ref (c
, unshare_expr (expr
));
1606 generate_copy_inout (c
, copy_out
, t
, list_p
);
1611 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
1612 elements under SRC to their counterparts under DST. There must be a 1-1
1613 correspondence of instantiated elements. */
1616 generate_element_copy (struct sra_elt
*dst
, struct sra_elt
*src
, tree
*list_p
)
1618 struct sra_elt
*dc
, *sc
;
1620 for (dc
= dst
->children
; dc
; dc
= dc
->sibling
)
1622 sc
= lookup_element (src
, dc
->element
, NULL
, NO_INSERT
);
1624 generate_element_copy (dc
, sc
, list_p
);
1627 if (dst
->replacement
)
1631 gcc_assert (src
->replacement
);
1633 t
= sra_build_assignment (dst
->replacement
, src
->replacement
);
1634 append_to_statement_list (t
, list_p
);
1638 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
1639 elements under ELT. In addition, do not assign to elements that have been
1640 marked VISITED but do reset the visited flag; this allows easy coordination
1641 with generate_element_init. */
1644 generate_element_zero (struct sra_elt
*elt
, tree
*list_p
)
1650 elt
->visited
= false;
1654 for (c
= elt
->children
; c
; c
= c
->sibling
)
1655 generate_element_zero (c
, list_p
);
1657 if (elt
->replacement
)
1661 gcc_assert (elt
->is_scalar
);
1662 t
= fold_convert (elt
->type
, integer_zero_node
);
1664 t
= sra_build_assignment (elt
->replacement
, t
);
1665 append_to_statement_list (t
, list_p
);
1669 /* Generate an assignment VAR = INIT, where INIT may need gimplification.
1670 Add the result to *LIST_P. */
1673 generate_one_element_init (tree var
, tree init
, tree
*list_p
)
1675 /* The replacement can be almost arbitrarily complex. Gimplify. */
1678 stmt
= sra_build_assignment (var
, init
);
1679 gimplify_and_add (stmt
, list_p
);
1682 /* Generate a set of assignment statements in *LIST_P to set all instantiated
1683 elements under ELT with the contents of the initializer INIT. In addition,
1684 mark all assigned elements VISITED; this allows easy coordination with
1685 generate_element_zero. Return false if we found a case we couldn't
1689 generate_element_init_1 (struct sra_elt
*elt
, tree init
, tree
*list_p
)
1692 enum tree_code init_code
;
1693 struct sra_elt
*sub
;
1695 unsigned HOST_WIDE_INT idx
;
1696 tree value
, purpose
;
1698 /* We can be passed DECL_INITIAL of a static variable. It might have a
1699 conversion, which we strip off here. */
1700 STRIP_USELESS_TYPE_CONVERSION (init
);
1701 init_code
= TREE_CODE (init
);
1705 if (elt
->replacement
)
1707 generate_one_element_init (elt
->replacement
, init
, list_p
);
1708 elt
->visited
= true;
1717 for (sub
= elt
->children
; sub
; sub
= sub
->sibling
)
1719 if (sub
->element
== integer_zero_node
)
1720 t
= (init_code
== COMPLEX_EXPR
1721 ? TREE_OPERAND (init
, 0) : TREE_REALPART (init
));
1723 t
= (init_code
== COMPLEX_EXPR
1724 ? TREE_OPERAND (init
, 1) : TREE_IMAGPART (init
));
1725 result
&= generate_element_init_1 (sub
, t
, list_p
);
1730 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), idx
, purpose
, value
)
1732 if (TREE_CODE (purpose
) == RANGE_EXPR
)
1734 tree lower
= TREE_OPERAND (purpose
, 0);
1735 tree upper
= TREE_OPERAND (purpose
, 1);
1739 sub
= lookup_element (elt
, lower
, NULL
, NO_INSERT
);
1741 result
&= generate_element_init_1 (sub
, value
, list_p
);
1742 if (tree_int_cst_equal (lower
, upper
))
1744 lower
= int_const_binop (PLUS_EXPR
, lower
,
1745 integer_one_node
, true);
1750 sub
= lookup_element (elt
, purpose
, NULL
, NO_INSERT
);
1752 result
&= generate_element_init_1 (sub
, value
, list_p
);
1758 elt
->visited
= true;
1765 /* A wrapper function for generate_element_init_1 that handles cleanup after
1769 generate_element_init (struct sra_elt
*elt
, tree init
, tree
*list_p
)
1773 push_gimplify_context ();
1774 ret
= generate_element_init_1 (elt
, init
, list_p
);
1775 pop_gimplify_context (NULL
);
1777 /* The replacement can expose previously unreferenced variables. */
1780 tree_stmt_iterator i
;
1782 for (i
= tsi_start (*list_p
); !tsi_end_p (i
); tsi_next (&i
))
1783 find_new_referenced_vars (tsi_stmt_ptr (i
));
1789 /* Insert STMT on all the outgoing edges out of BB. Note that if BB
1790 has more than one edge, STMT will be replicated for each edge. Also,
1791 abnormal edges will be ignored. */
1794 insert_edge_copies (tree stmt
, basic_block bb
)
1801 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1803 /* We don't need to insert copies on abnormal edges. The
1804 value of the scalar replacement is not guaranteed to
1805 be valid through an abnormal edge. */
1806 if (!(e
->flags
& EDGE_ABNORMAL
))
1810 bsi_insert_on_edge (e
, stmt
);
1814 bsi_insert_on_edge (e
, unsave_expr_now (stmt
));
1819 /* Helper function to insert LIST before BSI, and set up line number info. */
1822 sra_insert_before (block_stmt_iterator
*bsi
, tree list
)
1824 tree stmt
= bsi_stmt (*bsi
);
1826 if (EXPR_HAS_LOCATION (stmt
))
1827 annotate_all_with_locus (&list
, EXPR_LOCATION (stmt
));
1828 bsi_insert_before (bsi
, list
, BSI_SAME_STMT
);
1831 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
1834 sra_insert_after (block_stmt_iterator
*bsi
, tree list
)
1836 tree stmt
= bsi_stmt (*bsi
);
1838 if (EXPR_HAS_LOCATION (stmt
))
1839 annotate_all_with_locus (&list
, EXPR_LOCATION (stmt
));
1841 if (stmt_ends_bb_p (stmt
))
1842 insert_edge_copies (list
, bsi
->bb
);
1844 bsi_insert_after (bsi
, list
, BSI_SAME_STMT
);
1847 /* Similarly, but replace the statement at BSI. */
1850 sra_replace (block_stmt_iterator
*bsi
, tree list
)
1852 sra_insert_before (bsi
, list
);
1854 if (bsi_end_p (*bsi
))
1855 *bsi
= bsi_last (bsi
->bb
);
1860 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
1861 if elt is scalar, or some occurrence of ELT that requires a complete
1862 aggregate. IS_OUTPUT is true if ELT is being modified. */
1865 scalarize_use (struct sra_elt
*elt
, tree
*expr_p
, block_stmt_iterator
*bsi
,
1866 bool is_output
, bool use_all
)
1868 tree list
= NULL
, stmt
= bsi_stmt (*bsi
);
1870 if (elt
->replacement
)
1872 /* If we have a replacement, then updating the reference is as
1873 simple as modifying the existing statement in place. */
1875 mark_all_v_defs (stmt
);
1876 *expr_p
= elt
->replacement
;
1881 /* Otherwise we need some copies. If ELT is being read, then we want
1882 to store all (modified) sub-elements back into the structure before
1883 the reference takes place. If ELT is being written, then we want to
1884 load the changed values back into our shadow variables. */
1885 /* ??? We don't check modified for reads, we just always write all of
1886 the values. We should be able to record the SSA number of the VOP
1887 for which the values were last read. If that number matches the
1888 SSA number of the VOP in the current statement, then we needn't
1889 emit an assignment. This would also eliminate double writes when
1890 a structure is passed as more than one argument to a function call.
1891 This optimization would be most effective if sra_walk_function
1892 processed the blocks in dominator order. */
1894 generate_copy_inout (elt
, is_output
, generate_element_ref (elt
), &list
);
1897 mark_all_v_defs (list
);
1899 sra_insert_after (bsi
, list
);
1902 sra_insert_before (bsi
, list
);
1904 mark_no_warning (elt
);
1909 /* Scalarize a COPY. To recap, this is an assignment statement between
1910 two scalarizable references, LHS_ELT and RHS_ELT. */
1913 scalarize_copy (struct sra_elt
*lhs_elt
, struct sra_elt
*rhs_elt
,
1914 block_stmt_iterator
*bsi
)
1918 if (lhs_elt
->replacement
&& rhs_elt
->replacement
)
1920 /* If we have two scalar operands, modify the existing statement. */
1921 stmt
= bsi_stmt (*bsi
);
1923 /* See the commentary in sra_walk_function concerning
1924 RETURN_EXPR, and why we should never see one here. */
1925 gcc_assert (TREE_CODE (stmt
) == MODIFY_EXPR
);
1927 TREE_OPERAND (stmt
, 0) = lhs_elt
->replacement
;
1928 TREE_OPERAND (stmt
, 1) = rhs_elt
->replacement
;
1931 else if (lhs_elt
->use_block_copy
|| rhs_elt
->use_block_copy
)
1933 /* If either side requires a block copy, then sync the RHS back
1934 to the original structure, leave the original assignment
1935 statement (which will perform the block copy), then load the
1936 LHS values out of its now-updated original structure. */
1937 /* ??? Could perform a modified pair-wise element copy. That
1938 would at least allow those elements that are instantiated in
1939 both structures to be optimized well. */
1942 generate_copy_inout (rhs_elt
, false,
1943 generate_element_ref (rhs_elt
), &list
);
1946 mark_all_v_defs (list
);
1947 sra_insert_before (bsi
, list
);
1951 generate_copy_inout (lhs_elt
, true,
1952 generate_element_ref (lhs_elt
), &list
);
1955 mark_all_v_defs (list
);
1956 sra_insert_after (bsi
, list
);
1961 /* Otherwise both sides must be fully instantiated. In which
1962 case perform pair-wise element assignments and replace the
1963 original block copy statement. */
1965 stmt
= bsi_stmt (*bsi
);
1966 mark_all_v_defs (stmt
);
1969 generate_element_copy (lhs_elt
, rhs_elt
, &list
);
1971 mark_all_v_defs (list
);
1972 sra_replace (bsi
, list
);
1976 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
1977 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
1978 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty
1982 scalarize_init (struct sra_elt
*lhs_elt
, tree rhs
, block_stmt_iterator
*bsi
)
1987 /* Generate initialization statements for all members extant in the RHS. */
1990 /* Unshare the expression just in case this is from a decl's initial. */
1991 rhs
= unshare_expr (rhs
);
1992 result
= generate_element_init (lhs_elt
, rhs
, &list
);
1995 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
1996 a zero value. Initialize the rest of the instantiated elements. */
1997 generate_element_zero (lhs_elt
, &list
);
2001 /* If we failed to convert the entire initializer, then we must
2002 leave the structure assignment in place and must load values
2003 from the structure into the slots for which we did not find
2004 constants. The easiest way to do this is to generate a complete
2005 copy-out, and then follow that with the constant assignments
2006 that we were able to build. DCE will clean things up. */
2008 generate_copy_inout (lhs_elt
, true, generate_element_ref (lhs_elt
),
2010 append_to_statement_list (list
, &list0
);
2014 if (lhs_elt
->use_block_copy
|| !result
)
2016 /* Since LHS is not fully instantiated, we must leave the structure
2017 assignment in place. Treating this case differently from a USE
2018 exposes constants to later optimizations. */
2021 mark_all_v_defs (list
);
2022 sra_insert_after (bsi
, list
);
2027 /* The LHS is fully instantiated. The list of initializations
2028 replaces the original structure assignment. */
2030 mark_all_v_defs (bsi_stmt (*bsi
));
2031 mark_all_v_defs (list
);
2032 sra_replace (bsi
, list
);
2036 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_NO_TRAP
2037 on all INDIRECT_REFs. */
2040 mark_notrap (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2044 if (TREE_CODE (t
) == INDIRECT_REF
)
2046 TREE_THIS_NOTRAP (t
) = 1;
2049 else if (IS_TYPE_OR_DECL_P (t
))
2055 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
2056 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
2057 if ELT is on the left-hand side. */
2060 scalarize_ldst (struct sra_elt
*elt
, tree other
,
2061 block_stmt_iterator
*bsi
, bool is_output
)
2063 /* Shouldn't have gotten called for a scalar. */
2064 gcc_assert (!elt
->replacement
);
2066 if (elt
->use_block_copy
)
2068 /* Since ELT is not fully instantiated, we have to leave the
2069 block copy in place. Treat this as a USE. */
2070 scalarize_use (elt
, NULL
, bsi
, is_output
, false);
2074 /* The interesting case is when ELT is fully instantiated. In this
2075 case we can have each element stored/loaded directly to/from the
2076 corresponding slot in OTHER. This avoids a block copy. */
2078 tree list
= NULL
, stmt
= bsi_stmt (*bsi
);
2080 mark_all_v_defs (stmt
);
2081 generate_copy_inout (elt
, is_output
, other
, &list
);
2082 mark_all_v_defs (list
);
2085 /* Preserve EH semantics. */
2086 if (stmt_ends_bb_p (stmt
))
2088 tree_stmt_iterator tsi
;
2091 /* Extract the first statement from LIST. */
2092 tsi
= tsi_start (list
);
2093 first
= tsi_stmt (tsi
);
2096 /* Replace the old statement with this new representative. */
2097 bsi_replace (bsi
, first
, true);
2099 if (!tsi_end_p (tsi
))
2101 /* If any reference would trap, then they all would. And more
2102 to the point, the first would. Therefore none of the rest
2103 will trap since the first didn't. Indicate this by
2104 iterating over the remaining statements and set
2105 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
2108 walk_tree (tsi_stmt_ptr (tsi
), mark_notrap
, NULL
, NULL
);
2111 while (!tsi_end_p (tsi
));
2113 insert_edge_copies (list
, bsi
->bb
);
2117 sra_replace (bsi
, list
);
2121 /* Generate initializations for all scalarizable parameters. */
2124 scalarize_parms (void)
2130 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in
, 0, i
, bi
)
2132 tree var
= referenced_var (i
);
2133 struct sra_elt
*elt
= lookup_element (NULL
, var
, NULL
, NO_INSERT
);
2134 generate_copy_inout (elt
, true, var
, &list
);
2139 insert_edge_copies (list
, ENTRY_BLOCK_PTR
);
2140 mark_all_v_defs (list
);
2144 /* Entry point to phase 4. Update the function to match replacements. */
2147 scalarize_function (void)
2149 static const struct sra_walk_fns fns
= {
2150 scalarize_use
, scalarize_copy
, scalarize_init
, scalarize_ldst
, false
2153 sra_walk_function (&fns
);
2155 bsi_commit_edge_inserts ();
2159 /* Debug helper function. Print ELT in a nice human-readable format. */
2162 dump_sra_elt_name (FILE *f
, struct sra_elt
*elt
)
2164 if (elt
->parent
&& TREE_CODE (elt
->parent
->type
) == COMPLEX_TYPE
)
2166 fputs (elt
->element
== integer_zero_node
? "__real__ " : "__imag__ ", f
);
2167 dump_sra_elt_name (f
, elt
->parent
);
2172 dump_sra_elt_name (f
, elt
->parent
);
2173 if (DECL_P (elt
->element
))
2175 if (TREE_CODE (elt
->element
) == FIELD_DECL
)
2177 print_generic_expr (f
, elt
->element
, dump_flags
);
2180 fprintf (f
, "[" HOST_WIDE_INT_PRINT_DEC
"]",
2181 TREE_INT_CST_LOW (elt
->element
));
2185 /* Likewise, but callable from the debugger. */
2188 debug_sra_elt_name (struct sra_elt
*elt
)
2190 dump_sra_elt_name (stderr
, elt
);
2191 fputc ('\n', stderr
);
static void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static void
tree_sra (void)
{
  /* Initialize local variables.  */
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra =
{
  "sra",                                /* name */
  gate_sra,                             /* gate */
  tree_sra,                             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SRA,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa,  /* todo_flags_finish */
  0                                     /* letter */
};