1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
4 Copyright (C) 2008-2025 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Reduction of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevent their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
76 #include "coretypes.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
87 #include "gimple-pretty-print.h"
89 #include "fold-const.h"
91 #include "stor-layout.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
100 #include "builtins.h"
101 #include "tree-sra.h"
104 /* Enumeration of all aggregate reductions we can do. */
105 enum sra_mode
{ SRA_MODE_EARLY_IPA
, /* early call regularization */
106 SRA_MODE_EARLY_INTRA
, /* early intraprocedural SRA */
107 SRA_MODE_INTRA
}; /* late intraprocedural SRA */
109 /* Global variable describing which aggregate reduction we are performing at
111 static enum sra_mode sra_mode
;
115 /* ACCESS represents each access to an aggregate variable (as a whole or a
116 part). It can also represent a group of accesses that refer to exactly the
117 same fragment of an aggregate (i.e. those that have exactly the same offset
118 and size). Such representatives for a single aggregate, once determined,
119 are linked in a linked list and have the group fields set.
121 Moreover, when doing intraprocedural SRA, a tree is built from those
122 representatives (by the means of first_child and next_sibling pointers), in
123 which all items in a subtree are "within" the root, i.e. their offset is
124 greater or equal to offset of the root and offset+size is smaller or equal
125 to offset+size of the root. Children of an access are sorted by offset.
127 Note that accesses to parts of vector and complex number types always
128 represented by an access to the whole complex number or a vector. It is a
129 duty of the modifying functions to replace them appropriately. */
133 /* Values returned by `get_ref_base_and_extent' for each component reference
134 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
135 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
136 HOST_WIDE_INT offset
;
140 /* Expression. It is context dependent so do not use it to create new
141 expressions to access the original aggregate. See PR 42154 for a
147 /* The statement this access belongs to. */
150 /* Next group representative for this aggregate. */
151 struct access
*next_grp
;
153 /* Pointer to the group representative. Pointer to itself if the struct is
154 the representative. */
155 struct access
*group_representative
;
157 /* After access tree has been constructed, this points to the parent of the
158 current access, if there is one. NULL for roots. */
159 struct access
*parent
;
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access
*first_child
;
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
167 struct access
*next_sibling
;
169 /* Pointers to the first and last element in the linked list of assign
170 links for propagation from LHS to RHS. */
171 struct assign_link
*first_rhs_link
, *last_rhs_link
;
173 /* Pointers to the first and last element in the linked list of assign
174 links for propagation from LHS to RHS. */
175 struct assign_link
*first_lhs_link
, *last_lhs_link
;
177 /* Pointer to the next access in the work queues. */
178 struct access
*next_rhs_queued
, *next_lhs_queued
;
180 /* Replacement variable for this access "region." Never to be accessed
181 directly, always only by the means of get_access_replacement() and only
182 when grp_to_be_replaced flag is set. */
183 tree replacement_decl
;
185 /* Is this access made in reverse storage order? */
186 unsigned reverse
: 1;
188 /* Is this particular access write access? */
191 /* Is this access currently in the rhs work queue? */
192 unsigned grp_rhs_queued
: 1;
194 /* Is this access currently in the lhs work queue? */
195 unsigned grp_lhs_queued
: 1;
197 /* Does this group contain a write access? This flag is propagated down the
199 unsigned grp_write
: 1;
201 /* Does this group contain a read access? This flag is propagated down the
203 unsigned grp_read
: 1;
205 /* Does this group contain a read access that comes from an assignment
206 statement? This flag is propagated down the access tree. */
207 unsigned grp_assignment_read
: 1;
209 /* Does this group contain a write access that comes from an assignment
210 statement? This flag is propagated down the access tree. */
211 unsigned grp_assignment_write
: 1;
213 /* Does this group contain a read access through a scalar type? This flag is
214 not propagated in the access tree in any direction. */
215 unsigned grp_scalar_read
: 1;
217 /* Does this group contain a write access through a scalar type? This flag
218 is not propagated in the access tree in any direction. */
219 unsigned grp_scalar_write
: 1;
221 /* In a root of an access tree, true means that the entire tree should be
222 totally scalarized - that all scalar leafs should be scalarized and
223 non-root grp_total_scalarization accesses should be honored. Otherwise,
224 non-root accesses with grp_total_scalarization should never get scalar
226 unsigned grp_total_scalarization
: 1;
228 /* Other passes of the analysis use this bit to make function
229 analyze_access_subtree create scalar replacements for this group if
231 unsigned grp_hint
: 1;
233 /* Is the subtree rooted in this access fully covered by scalar
235 unsigned grp_covered
: 1;
237 /* If set to true, this access and all below it in an access tree must not be
239 unsigned grp_unscalarizable_region
: 1;
241 /* Whether data have been written to parts of the aggregate covered by this
242 access which is not to be scalarized. This flag is propagated up in the
244 unsigned grp_unscalarized_data
: 1;
246 /* Set if all accesses in the group consist of the same chain of
247 COMPONENT_REFs and ARRAY_REFs. */
248 unsigned grp_same_access_path
: 1;
250 /* Does this access and/or group contain a write access through a
252 unsigned grp_partial_lhs
: 1;
254 /* Set when a scalar replacement should be created for this variable. */
255 unsigned grp_to_be_replaced
: 1;
257 /* Set when we want a replacement for the sole purpose of having it in
258 generated debug statements. */
259 unsigned grp_to_be_debug_replaced
: 1;
261 /* Should TREE_NO_WARNING of a replacement be set? */
262 unsigned grp_no_warning
: 1;
264 /* Result of propagation accross link from LHS to RHS. */
265 unsigned grp_result_of_prop_from_lhs
: 1;
268 typedef struct access
*access_p
;
271 /* Alloc pool for allocating access structures. */
272 static object_allocator
<struct access
> access_pool ("SRA accesses");
274 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
275 are used to propagate subaccesses from rhs to lhs and vice versa as long as
276 they don't conflict with what is already there. In the RHS->LHS direction,
277 we also propagate grp_write flag to lazily mark that the access contains any
281 struct access
*lacc
, *racc
;
282 struct assign_link
*next_rhs
, *next_lhs
;
285 /* Alloc pool for allocating assign link structures. */
286 static object_allocator
<assign_link
> assign_link_pool ("SRA links");
288 /* Base (tree) -> Vector (vec<access_p> *) map. */
289 static hash_map
<tree
, auto_vec
<access_p
> > *base_access_vec
;
291 /* Hash to limit creation of artificial accesses */
292 static hash_map
<tree
, unsigned> *propagation_budget
;
294 /* Candidate hash table helpers. */
296 struct uid_decl_hasher
: nofree_ptr_hash
<tree_node
>
298 static inline hashval_t
hash (const tree_node
*);
299 static inline bool equal (const tree_node
*, const tree_node
*);
302 /* Hash a tree in a uid_decl_map. */
305 uid_decl_hasher::hash (const tree_node
*item
)
307 return item
->decl_minimal
.uid
;
310 /* Return true if the DECL_UID in both trees are equal. */
313 uid_decl_hasher::equal (const tree_node
*a
, const tree_node
*b
)
315 return (a
->decl_minimal
.uid
== b
->decl_minimal
.uid
);
318 /* Set of candidates. */
319 static bitmap candidate_bitmap
;
320 static hash_table
<uid_decl_hasher
> *candidates
;
322 /* For a candidate UID return the candidates decl. */
325 candidate (unsigned uid
)
328 t
.decl_minimal
.uid
= uid
;
329 return candidates
->find_with_hash (&t
, static_cast <hashval_t
> (uid
));
332 /* Bitmap of candidates which we should try to entirely scalarize away and
333 those which cannot be (because they are and need be used as a whole). */
334 static bitmap should_scalarize_away_bitmap
, cannot_scalarize_away_bitmap
;
336 /* Bitmap of candidates in the constant pool, which cannot be scalarized
337 because this would produce non-constant expressions (e.g. Ada). */
338 static bitmap disqualified_constants
;
340 /* Bitmap of candidates which are passed by reference in call arguments. */
341 static bitmap passed_by_ref_in_call
;
343 /* Obstack for creation of fancy names. */
344 static struct obstack name_obstack
;
346 /* Head of a linked list of accesses that need to have its subaccesses
347 propagated to their assignment counterparts. */
348 static struct access
*rhs_work_queue_head
, *lhs_work_queue_head
;
350 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
351 representative fields are dumped, otherwise those which only describe the
352 individual access are. */
356 /* Number of processed aggregates is readily available in
357 analyze_all_variable_accesses and so is not stored here. */
359 /* Number of created scalar replacements. */
362 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
366 /* Number of statements created by generate_subtree_copies. */
369 /* Number of statements created by load_assign_lhs_subreplacements. */
372 /* Number of times sra_modify_assign has deleted a statement. */
375 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
376 RHS reparately due to type conversions or nonexistent matching
378 int separate_lhs_rhs_handling
;
380 /* Number of parameters that were removed because they were unused. */
381 int deleted_unused_parameters
;
383 /* Number of scalars passed as parameters by reference that have been
384 converted to be passed by value. */
385 int scalar_by_ref_to_by_val
;
387 /* Number of aggregate parameters that were replaced by one or more of their
389 int aggregate_params_reduced
;
391 /* Numbber of components created when splitting aggregate parameters. */
392 int param_reductions_created
;
394 /* Number of deferred_init calls that are modified. */
397 /* Number of deferred_init calls that are created by
398 generate_subtree_deferred_init. */
399 int subtree_deferred_init
;
403 dump_access (FILE *f
, struct access
*access
, bool grp
)
405 fprintf (f
, "access { ");
406 fprintf (f
, "base = (%d)'", DECL_UID (access
->base
));
407 print_generic_expr (f
, access
->base
);
408 fprintf (f
, "', offset = " HOST_WIDE_INT_PRINT_DEC
, access
->offset
);
409 fprintf (f
, ", size = " HOST_WIDE_INT_PRINT_DEC
, access
->size
);
410 fprintf (f
, ", expr = ");
411 print_generic_expr (f
, access
->expr
);
412 fprintf (f
, ", type = ");
413 print_generic_expr (f
, access
->type
);
414 fprintf (f
, ", reverse = %d", access
->reverse
);
416 fprintf (f
, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
417 "grp_assignment_write = %d, grp_scalar_read = %d, "
418 "grp_scalar_write = %d, grp_total_scalarization = %d, "
419 "grp_hint = %d, grp_covered = %d, "
420 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
421 "grp_same_access_path = %d, grp_partial_lhs = %d, "
422 "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
423 access
->grp_read
, access
->grp_write
, access
->grp_assignment_read
,
424 access
->grp_assignment_write
, access
->grp_scalar_read
,
425 access
->grp_scalar_write
, access
->grp_total_scalarization
,
426 access
->grp_hint
, access
->grp_covered
,
427 access
->grp_unscalarizable_region
, access
->grp_unscalarized_data
,
428 access
->grp_same_access_path
, access
->grp_partial_lhs
,
429 access
->grp_to_be_replaced
, access
->grp_to_be_debug_replaced
);
431 fprintf (f
, ", write = %d, grp_total_scalarization = %d, "
432 "grp_partial_lhs = %d}\n",
433 access
->write
, access
->grp_total_scalarization
,
434 access
->grp_partial_lhs
);
437 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
440 dump_access_tree_1 (FILE *f
, struct access
*access
, int level
)
446 for (i
= 0; i
< level
; i
++)
449 dump_access (f
, access
, true);
451 if (access
->first_child
)
452 dump_access_tree_1 (f
, access
->first_child
, level
+ 1);
454 access
= access
->next_sibling
;
459 /* Dump all access trees for a variable, given the pointer to the first root in
463 dump_access_tree (FILE *f
, struct access
*access
)
465 for (; access
; access
= access
->next_grp
)
466 dump_access_tree_1 (f
, access
, 0);
469 /* Return true iff ACC is non-NULL and has subaccesses. */
472 access_has_children_p (struct access
*acc
)
474 return acc
&& acc
->first_child
;
477 /* Return true iff ACC is (partly) covered by at least one replacement. */
480 access_has_replacements_p (struct access
*acc
)
482 struct access
*child
;
483 if (acc
->grp_to_be_replaced
)
485 for (child
= acc
->first_child
; child
; child
= child
->next_sibling
)
486 if (access_has_replacements_p (child
))
491 /* Return a vector of pointers to accesses for the variable given in BASE or
492 NULL if there is none. */
494 static vec
<access_p
> *
495 get_base_access_vector (tree base
)
497 return base_access_vec
->get (base
);
500 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
501 in ACCESS. Return NULL if it cannot be found. */
503 static struct access
*
504 find_access_in_subtree (struct access
*access
, HOST_WIDE_INT offset
,
507 while (access
&& (access
->offset
!= offset
|| access
->size
!= size
))
509 struct access
*child
= access
->first_child
;
511 while (child
&& (child
->offset
+ child
->size
<= offset
))
512 child
= child
->next_sibling
;
516 /* Total scalarization does not replace single field structures with their
517 single field but rather creates an access for them underneath. Look for
520 while (access
->first_child
521 && access
->first_child
->offset
== offset
522 && access
->first_child
->size
== size
)
523 access
= access
->first_child
;
528 /* Return the first group representative for DECL or NULL if none exists. */
530 static struct access
*
531 get_first_repr_for_decl (tree base
)
533 vec
<access_p
> *access_vec
;
535 access_vec
= get_base_access_vector (base
);
539 return (*access_vec
)[0];
542 /* Find an access representative for the variable BASE and given OFFSET and
543 SIZE. Requires that access trees have already been built. Return NULL if
544 it cannot be found. */
546 static struct access
*
547 get_var_base_offset_size_access (tree base
, HOST_WIDE_INT offset
,
550 struct access
*access
;
552 access
= get_first_repr_for_decl (base
);
553 while (access
&& (access
->offset
+ access
->size
<= offset
))
554 access
= access
->next_grp
;
558 return find_access_in_subtree (access
, offset
, size
);
561 /* Add LINK to the linked list of assign links of RACC. */
564 add_link_to_rhs (struct access
*racc
, struct assign_link
*link
)
566 gcc_assert (link
->racc
== racc
);
568 if (!racc
->first_rhs_link
)
570 gcc_assert (!racc
->last_rhs_link
);
571 racc
->first_rhs_link
= link
;
574 racc
->last_rhs_link
->next_rhs
= link
;
576 racc
->last_rhs_link
= link
;
577 link
->next_rhs
= NULL
;
580 /* Add LINK to the linked list of lhs assign links of LACC. */
583 add_link_to_lhs (struct access
*lacc
, struct assign_link
*link
)
585 gcc_assert (link
->lacc
== lacc
);
587 if (!lacc
->first_lhs_link
)
589 gcc_assert (!lacc
->last_lhs_link
);
590 lacc
->first_lhs_link
= link
;
593 lacc
->last_lhs_link
->next_lhs
= link
;
595 lacc
->last_lhs_link
= link
;
596 link
->next_lhs
= NULL
;
599 /* Move all link structures in their linked list in OLD_ACC to the linked list
602 relink_to_new_repr (struct access
*new_acc
, struct access
*old_acc
)
604 if (old_acc
->first_rhs_link
)
607 if (new_acc
->first_rhs_link
)
609 gcc_assert (!new_acc
->last_rhs_link
->next_rhs
);
610 gcc_assert (!old_acc
->last_rhs_link
611 || !old_acc
->last_rhs_link
->next_rhs
);
613 new_acc
->last_rhs_link
->next_rhs
= old_acc
->first_rhs_link
;
614 new_acc
->last_rhs_link
= old_acc
->last_rhs_link
;
618 gcc_assert (!new_acc
->last_rhs_link
);
620 new_acc
->first_rhs_link
= old_acc
->first_rhs_link
;
621 new_acc
->last_rhs_link
= old_acc
->last_rhs_link
;
623 old_acc
->first_rhs_link
= old_acc
->last_rhs_link
= NULL
;
626 gcc_assert (!old_acc
->last_rhs_link
);
628 if (old_acc
->first_lhs_link
)
631 if (new_acc
->first_lhs_link
)
633 gcc_assert (!new_acc
->last_lhs_link
->next_lhs
);
634 gcc_assert (!old_acc
->last_lhs_link
635 || !old_acc
->last_lhs_link
->next_lhs
);
637 new_acc
->last_lhs_link
->next_lhs
= old_acc
->first_lhs_link
;
638 new_acc
->last_lhs_link
= old_acc
->last_lhs_link
;
642 gcc_assert (!new_acc
->last_lhs_link
);
644 new_acc
->first_lhs_link
= old_acc
->first_lhs_link
;
645 new_acc
->last_lhs_link
= old_acc
->last_lhs_link
;
647 old_acc
->first_lhs_link
= old_acc
->last_lhs_link
= NULL
;
650 gcc_assert (!old_acc
->last_lhs_link
);
654 /* Add ACCESS to the work to queue for propagation of subaccesses from RHS to
655 LHS (which is actually a stack). */
658 add_access_to_rhs_work_queue (struct access
*access
)
660 if (access
->first_rhs_link
&& !access
->grp_rhs_queued
)
662 gcc_assert (!access
->next_rhs_queued
);
663 access
->next_rhs_queued
= rhs_work_queue_head
;
664 access
->grp_rhs_queued
= 1;
665 rhs_work_queue_head
= access
;
669 /* Add ACCESS to the work to queue for propagation of subaccesses from LHS to
670 RHS (which is actually a stack). */
673 add_access_to_lhs_work_queue (struct access
*access
)
675 if (access
->first_lhs_link
&& !access
->grp_lhs_queued
)
677 gcc_assert (!access
->next_lhs_queued
);
678 access
->next_lhs_queued
= lhs_work_queue_head
;
679 access
->grp_lhs_queued
= 1;
680 lhs_work_queue_head
= access
;
684 /* Pop an access from the work queue for propagating from RHS to LHS, and
685 return it, assuming there is one. */
687 static struct access
*
688 pop_access_from_rhs_work_queue (void)
690 struct access
*access
= rhs_work_queue_head
;
692 rhs_work_queue_head
= access
->next_rhs_queued
;
693 access
->next_rhs_queued
= NULL
;
694 access
->grp_rhs_queued
= 0;
698 /* Pop an access from the work queue for propagating from LHS to RHS, and
699 return it, assuming there is one. */
701 static struct access
*
702 pop_access_from_lhs_work_queue (void)
704 struct access
*access
= lhs_work_queue_head
;
706 lhs_work_queue_head
= access
->next_lhs_queued
;
707 access
->next_lhs_queued
= NULL
;
708 access
->grp_lhs_queued
= 0;
712 /* Allocate necessary structures. */
715 sra_initialize (void)
717 candidate_bitmap
= BITMAP_ALLOC (NULL
);
718 candidates
= new hash_table
<uid_decl_hasher
>
719 (vec_safe_length (cfun
->local_decls
) / 2);
720 should_scalarize_away_bitmap
= BITMAP_ALLOC (NULL
);
721 cannot_scalarize_away_bitmap
= BITMAP_ALLOC (NULL
);
722 disqualified_constants
= BITMAP_ALLOC (NULL
);
723 passed_by_ref_in_call
= BITMAP_ALLOC (NULL
);
724 gcc_obstack_init (&name_obstack
);
725 base_access_vec
= new hash_map
<tree
, auto_vec
<access_p
> >;
726 memset (&sra_stats
, 0, sizeof (sra_stats
));
729 /* Deallocate all general structures. */
732 sra_deinitialize (void)
734 BITMAP_FREE (candidate_bitmap
);
737 BITMAP_FREE (should_scalarize_away_bitmap
);
738 BITMAP_FREE (cannot_scalarize_away_bitmap
);
739 BITMAP_FREE (disqualified_constants
);
740 BITMAP_FREE (passed_by_ref_in_call
);
741 access_pool
.release ();
742 assign_link_pool
.release ();
743 obstack_free (&name_obstack
, NULL
);
745 delete base_access_vec
;
748 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
750 static bool constant_decl_p (tree decl
)
752 return VAR_P (decl
) && DECL_IN_CONSTANT_POOL (decl
);
755 /* Remove DECL from candidates for SRA and write REASON to the dump file if
759 disqualify_candidate (tree decl
, const char *reason
)
761 if (bitmap_clear_bit (candidate_bitmap
, DECL_UID (decl
)))
762 candidates
->remove_elt_with_hash (decl
, DECL_UID (decl
));
763 if (constant_decl_p (decl
))
764 bitmap_set_bit (disqualified_constants
, DECL_UID (decl
));
766 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
768 fprintf (dump_file
, "! Disqualifying ");
769 print_generic_expr (dump_file
, decl
);
770 fprintf (dump_file
, " - %s\n", reason
);
774 /* Return true iff the type contains a field or an element which does not allow
775 scalarization. Use VISITED_TYPES to avoid re-checking already checked
779 type_internals_preclude_sra_p_1 (tree type
, const char **msg
,
780 hash_set
<tree
> *visited_types
)
785 if (visited_types
->contains (type
))
787 visited_types
->add (type
);
789 switch (TREE_CODE (type
))
793 case QUAL_UNION_TYPE
:
794 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
795 if (TREE_CODE (fld
) == FIELD_DECL
)
797 if (TREE_CODE (fld
) == FUNCTION_DECL
)
799 tree ft
= TREE_TYPE (fld
);
801 if (TREE_THIS_VOLATILE (fld
))
803 *msg
= "volatile structure field";
806 if (!DECL_FIELD_OFFSET (fld
))
808 *msg
= "no structure field offset";
811 if (!DECL_SIZE (fld
))
813 *msg
= "zero structure field size";
816 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
818 *msg
= "structure field offset not fixed";
821 if (!tree_fits_uhwi_p (DECL_SIZE (fld
)))
823 *msg
= "structure field size not fixed";
826 if (!tree_fits_shwi_p (bit_position (fld
)))
828 *msg
= "structure field size too big";
831 if (AGGREGATE_TYPE_P (ft
)
832 && int_bit_position (fld
) % BITS_PER_UNIT
!= 0)
834 *msg
= "structure field is bit field";
838 if (AGGREGATE_TYPE_P (ft
)
839 && type_internals_preclude_sra_p_1 (ft
, msg
, visited_types
))
846 et
= TREE_TYPE (type
);
848 if (TYPE_VOLATILE (et
))
850 *msg
= "element type is volatile";
854 if (AGGREGATE_TYPE_P (et
)
855 && type_internals_preclude_sra_p_1 (et
, msg
, visited_types
))
865 /* Return true iff the type contains a field or an element which does not allow
869 type_internals_preclude_sra_p (tree type
, const char **msg
)
871 hash_set
<tree
> visited_types
;
872 return type_internals_preclude_sra_p_1 (type
, msg
, &visited_types
);
876 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
877 the three fields. Also add it to the vector of accesses corresponding to
878 the base. Finally, return the new access. */
880 static struct access
*
881 create_access_1 (tree base
, HOST_WIDE_INT offset
, HOST_WIDE_INT size
)
883 struct access
*access
= access_pool
.allocate ();
885 memset (access
, 0, sizeof (struct access
));
887 access
->offset
= offset
;
890 base_access_vec
->get_or_insert (base
).safe_push (access
);
895 static bool maybe_add_sra_candidate (tree
);
897 /* Create and insert access for EXPR. Return created access, or NULL if it is
898 not possible. Also scan for uses of constant pool as we go along and add
901 static struct access
*
902 create_access (tree expr
, gimple
*stmt
, bool write
)
904 struct access
*access
;
905 poly_int64 poffset
, psize
, pmax_size
;
907 bool reverse
, unscalarizable_region
= false;
909 base
= get_ref_base_and_extent (expr
, &poffset
, &psize
, &pmax_size
,
912 /* For constant-pool entries, check we can substitute the constant value. */
913 if (constant_decl_p (base
)
914 && !bitmap_bit_p (disqualified_constants
, DECL_UID (base
)))
917 && !is_gimple_reg_type (TREE_TYPE (expr
))
918 && dump_file
&& (dump_flags
& TDF_DETAILS
))
920 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
921 and elements of multidimensional arrays (which are
922 multi-element arrays in their own right). */
923 fprintf (dump_file
, "Allowing non-reg-type load of part"
924 " of constant-pool entry: ");
925 print_generic_expr (dump_file
, expr
);
927 maybe_add_sra_candidate (base
);
930 if (!DECL_P (base
) || !bitmap_bit_p (candidate_bitmap
, DECL_UID (base
)))
933 if (write
&& TREE_READONLY (base
))
935 disqualify_candidate (base
, "Encountered a store to a read-only decl.");
939 HOST_WIDE_INT offset
, size
, max_size
;
940 if (!poffset
.is_constant (&offset
)
941 || !psize
.is_constant (&size
)
942 || !pmax_size
.is_constant (&max_size
))
944 disqualify_candidate (base
, "Encountered a polynomial-sized access.");
948 if (size
!= max_size
)
951 unscalarizable_region
= true;
957 disqualify_candidate (base
, "Encountered a negative offset access.");
962 disqualify_candidate (base
, "Encountered an unconstrained access.");
965 if (offset
+ size
> tree_to_shwi (DECL_SIZE (base
)))
967 disqualify_candidate (base
, "Encountered an access beyond the base.");
970 if (TREE_CODE (TREE_TYPE (expr
)) == BITINT_TYPE
971 && size
> WIDE_INT_MAX_PRECISION
- 1)
973 disqualify_candidate (base
, "Encountered too large _BitInt access.");
977 access
= create_access_1 (base
, offset
, size
);
979 access
->type
= TREE_TYPE (expr
);
980 access
->write
= write
;
981 access
->grp_unscalarizable_region
= unscalarizable_region
;
983 access
->reverse
= reverse
;
988 /* Given an array type TYPE, extract element size to *EL_SIZE, minimum index to
989 *IDX and maximum index to *MAX so that the caller can iterate over all
990 elements and return true, except if the array is known to be zero-length,
991 then return false. */
994 prepare_iteration_over_array_elts (tree type
, HOST_WIDE_INT
*el_size
,
995 offset_int
*idx
, offset_int
*max
)
997 tree elem_size
= TYPE_SIZE (TREE_TYPE (type
));
998 gcc_assert (elem_size
&& tree_fits_shwi_p (elem_size
));
999 *el_size
= tree_to_shwi (elem_size
);
1000 gcc_assert (*el_size
> 0);
1002 tree minidx
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
1003 gcc_assert (TREE_CODE (minidx
) == INTEGER_CST
);
1004 tree maxidx
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1005 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1008 gcc_assert (TREE_CODE (maxidx
) == INTEGER_CST
);
1009 tree domain
= TYPE_DOMAIN (type
);
1010 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1011 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1012 *idx
= wi::to_offset (minidx
);
1013 *max
= wi::to_offset (maxidx
);
1014 if (!TYPE_UNSIGNED (domain
))
1016 *idx
= wi::sext (*idx
, TYPE_PRECISION (domain
));
1017 *max
= wi::sext (*max
, TYPE_PRECISION (domain
));
1022 /* A structure to track collecting padding and hold collected padding
1025 class sra_padding_collecting
1028 /* Given that there won't be any data until at least OFFSET, add an
1029 appropriate entry to the list of paddings or extend the last one. */
1030 void record_padding (HOST_WIDE_INT offset
);
1031 /* Vector of pairs describing contiguous pieces of padding, each pair
1032 consisting of offset and length. */
1033 auto_vec
<std::pair
<HOST_WIDE_INT
, HOST_WIDE_INT
>, 10> m_padding
;
1034 /* Offset where data should continue after the last seen actual bit of data
1035 if there was no padding. */
1036 HOST_WIDE_INT m_data_until
= 0;
1039 /* Given that there won't be any data until at least OFFSET, add an appropriate
1040 entry to the list of paddings or extend the last one. */
1042 void sra_padding_collecting::record_padding (HOST_WIDE_INT offset
)
1044 if (offset
> m_data_until
)
1046 HOST_WIDE_INT psz
= offset
- m_data_until
;
1047 if (!m_padding
.is_empty ()
1048 && ((m_padding
[m_padding
.length () - 1].first
1049 + m_padding
[m_padding
.length () - 1].second
) == offset
))
1050 m_padding
[m_padding
.length () - 1].second
+= psz
;
1052 m_padding
.safe_push (std::make_pair (m_data_until
, psz
));
1056 /* Return true iff TYPE is totally scalarizable - i.e. a RECORD_TYPE or
1057 fixed-length ARRAY_TYPE with fields that are either of gimple register types
1058 (excluding bit-fields) or (recursively) scalarizable types. CONST_DECL must
1059 be true if we are considering a decl from constant pool. If it is false,
1060 char arrays will be refused.
1062 TOTAL_OFFSET is the offset of TYPE within any outer type that is being
1065 If PC is non-NULL, collect padding information into the vector within the
1066 structure. The information is however only complete if the function returns
1067 true and does not contain any padding at its end. */
1070 totally_scalarizable_type_p (tree type
, bool const_decl
,
1071 HOST_WIDE_INT total_offset
,
1072 sra_padding_collecting
*pc
)
1074 if (is_gimple_reg_type (type
))
1078 pc
->record_padding (total_offset
);
1079 pc
->m_data_until
= total_offset
+ tree_to_shwi (TYPE_SIZE (type
));
1083 if (type_contains_placeholder_p (type
))
1086 bool have_predecessor_field
= false;
1087 HOST_WIDE_INT prev_pos
= 0;
1089 switch (TREE_CODE (type
))
1092 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1093 if (TREE_CODE (fld
) == FIELD_DECL
)
1095 tree ft
= TREE_TYPE (fld
);
1097 if (!DECL_SIZE (fld
))
1099 if (zerop (DECL_SIZE (fld
)))
1102 HOST_WIDE_INT pos
= int_bit_position (fld
);
1103 if (have_predecessor_field
1107 have_predecessor_field
= true;
1110 if (DECL_BIT_FIELD (fld
))
1113 if (!totally_scalarizable_type_p (ft
, const_decl
, total_offset
+ pos
,
1122 HOST_WIDE_INT min_elem_size
;
1126 min_elem_size
= BITS_PER_UNIT
;
1128 if (TYPE_DOMAIN (type
) == NULL_TREE
1129 || !tree_fits_shwi_p (TYPE_SIZE (type
))
1130 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type
)))
1131 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type
))) <= min_elem_size
)
1132 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type
))))
1134 if (tree_to_shwi (TYPE_SIZE (type
)) == 0
1135 && TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
)
1136 /* Zero-element array, should not prevent scalarization. */
1138 else if ((tree_to_shwi (TYPE_SIZE (type
)) <= 0)
1139 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
1140 /* Variable-length array, do not allow scalarization. */
1143 unsigned old_padding_len
= 0;
1145 old_padding_len
= pc
->m_padding
.length ();
1146 tree elem
= TREE_TYPE (type
);
1147 if (!totally_scalarizable_type_p (elem
, const_decl
, total_offset
, pc
))
1151 unsigned new_padding_len
= pc
->m_padding
.length ();
1152 HOST_WIDE_INT el_size
;
1153 offset_int idx
, max
;
1154 if (!prepare_iteration_over_array_elts (type
, &el_size
, &idx
, &max
))
1156 pc
->record_padding (total_offset
+ el_size
);
1158 for (HOST_WIDE_INT pos
= total_offset
+ el_size
;
1160 pos
+= el_size
, ++idx
)
1162 for (unsigned i
= old_padding_len
; i
< new_padding_len
; i
++)
1165 = pos
+ pc
->m_padding
[i
].first
- total_offset
;
1166 HOST_WIDE_INT psz
= pc
->m_padding
[i
].second
;
1167 pc
->m_padding
.safe_push (std::make_pair (pp
, psz
));
1170 pc
->m_data_until
= total_offset
+ tree_to_shwi (TYPE_SIZE (type
));
1179 /* Return true if REF has an VIEW_CONVERT_EXPR somewhere in it. */
1182 contains_view_convert_expr_p (const_tree ref
)
1184 while (handled_component_p (ref
))
1186 if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
1188 ref
= TREE_OPERAND (ref
, 0);
1194 /* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
1195 bit-field field declaration. If TYPE_CHANGING_P is non-NULL, set the bool
1196 it points to will be set if REF contains any of the above or a MEM_REF
1197 expression that effectively performs type conversion. */
1200 contains_vce_or_bfcref_p (const_tree ref
, bool *type_changing_p
= NULL
)
1202 while (handled_component_p (ref
))
1204 if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
1205 || (TREE_CODE (ref
) == COMPONENT_REF
1206 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1))))
1208 if (type_changing_p
)
1209 *type_changing_p
= true;
1212 ref
= TREE_OPERAND (ref
, 0);
1215 if (!type_changing_p
1216 || TREE_CODE (ref
) != MEM_REF
1217 || TREE_CODE (TREE_OPERAND (ref
, 0)) != ADDR_EXPR
)
1220 tree mem
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
1221 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref
))
1222 != TYPE_MAIN_VARIANT (TREE_TYPE (mem
)))
1223 *type_changing_p
= true;
1228 /* Search the given tree for a declaration by skipping handled components and
1229 exclude it from the candidates. */
1232 disqualify_base_of_expr (tree t
, const char *reason
)
1234 t
= get_base_address (t
);
1235 if (t
&& DECL_P (t
))
1236 disqualify_candidate (t
, reason
);
1239 /* Return true if the BIT_FIELD_REF read EXPR is handled by SRA. */
1242 sra_handled_bf_read_p (tree expr
)
1244 uint64_t size
, offset
;
1245 if (bit_field_size (expr
).is_constant (&size
)
1246 && bit_field_offset (expr
).is_constant (&offset
)
1247 && size
% BITS_PER_UNIT
== 0
1248 && offset
% BITS_PER_UNIT
== 0
1249 && pow2p_hwi (size
))
1254 /* Scan expression EXPR and create access structures for all accesses to
1255 candidates for scalarization. Return the created access or NULL if none is
1258 static struct access
*
1259 build_access_from_expr_1 (tree expr
, gimple
*stmt
, bool write
)
1261 /* We only allow ADDR_EXPRs in arguments of function calls and those must
1262 have been dealt with in build_access_from_call_arg. Any other address
1263 taking should have been caught by scan_visit_addr. */
1264 if (TREE_CODE (expr
) == ADDR_EXPR
)
1266 tree base
= get_base_address (TREE_OPERAND (expr
, 0));
1267 gcc_assert (!DECL_P (base
)
1268 || !bitmap_bit_p (candidate_bitmap
, DECL_UID (base
)));
1272 struct access
*ret
= NULL
;
1275 if ((TREE_CODE (expr
) == BIT_FIELD_REF
1276 && (write
|| !sra_handled_bf_read_p (expr
)))
1277 || TREE_CODE (expr
) == IMAGPART_EXPR
1278 || TREE_CODE (expr
) == REALPART_EXPR
)
1280 expr
= TREE_OPERAND (expr
, 0);
1284 partial_ref
= false;
1286 if (storage_order_barrier_p (expr
))
1288 disqualify_base_of_expr (expr
, "storage order barrier.");
1292 /* We need to dive through V_C_Es in order to get the size of its parameter
1293 and not the result type. Ada produces such statements. We are also
1294 capable of handling the topmost V_C_E but not any of those buried in other
1295 handled components. */
1296 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
1297 expr
= TREE_OPERAND (expr
, 0);
1299 if (contains_view_convert_expr_p (expr
))
1301 disqualify_base_of_expr (expr
, "V_C_E under a different handled "
1305 if (TREE_THIS_VOLATILE (expr
))
1307 disqualify_base_of_expr (expr
, "part of a volatile reference.");
1311 switch (TREE_CODE (expr
))
1314 if (TREE_CODE (TREE_OPERAND (expr
, 0)) != ADDR_EXPR
)
1322 case ARRAY_RANGE_REF
:
1324 ret
= create_access (expr
, stmt
, write
);
1331 if (write
&& partial_ref
&& ret
)
1332 ret
->grp_partial_lhs
= 1;
1337 /* Scan expression EXPR and create access structures for all accesses to
1338 candidates for scalarization. Return true if any access has been inserted.
1339 STMT must be the statement from which the expression is taken, WRITE must be
1340 true if the expression is a store and false otherwise. */
1343 build_access_from_expr (tree expr
, gimple
*stmt
, bool write
)
1345 struct access
*access
;
1347 access
= build_access_from_expr_1 (expr
, stmt
, write
);
1350 /* This means the aggregate is accesses as a whole in a way other than an
1351 assign statement and thus cannot be removed even if we had a scalar
1352 replacement for everything. */
1353 if (cannot_scalarize_away_bitmap
)
1354 bitmap_set_bit (cannot_scalarize_away_bitmap
, DECL_UID (access
->base
));
1360 enum out_edge_check
{ SRA_OUTGOING_EDGES_UNCHECKED
, SRA_OUTGOING_EDGES_OK
,
1361 SRA_OUTGOING_EDGES_FAIL
};
1363 /* Return true if STMT terminates BB and there is an abnormal edge going out of
1364 the BB and remember the decision in OE_CHECK. */
1367 abnormal_edge_after_stmt_p (gimple
*stmt
, enum out_edge_check
*oe_check
)
1369 if (*oe_check
== SRA_OUTGOING_EDGES_OK
)
1371 if (*oe_check
== SRA_OUTGOING_EDGES_FAIL
)
1373 if (stmt_ends_bb_p (stmt
))
1377 FOR_EACH_EDGE (e
, ei
, gimple_bb (stmt
)->succs
)
1378 if (e
->flags
& EDGE_ABNORMAL
)
1380 *oe_check
= SRA_OUTGOING_EDGES_FAIL
;
1384 *oe_check
= SRA_OUTGOING_EDGES_OK
;
1388 /* Scan expression EXPR which is an argument of a call and create access
1389 structures for all accesses to candidates for scalarization. Return true
1390 if any access has been inserted. STMT must be the statement from which the
1391 expression is taken. CAN_BE_RETURNED must be true if call argument flags
1392 do not rule out that the argument is directly returned. OE_CHECK is used
1393 to remember result of a test for abnormal outgoing edges after this
1397 build_access_from_call_arg (tree expr
, gimple
*stmt
, bool can_be_returned
,
1398 enum out_edge_check
*oe_check
)
1400 if (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
1403 if (TREE_CODE (expr
) == ADDR_EXPR
)
1404 base
= get_base_address (TREE_OPERAND (expr
, 0));
1405 disqualify_base_of_expr (base
, "Passed to a returns_twice call.");
1409 if (TREE_CODE (expr
) == ADDR_EXPR
)
1411 tree base
= get_base_address (TREE_OPERAND (expr
, 0));
1413 if (can_be_returned
)
1415 disqualify_base_of_expr (base
, "Address possibly returned, "
1416 "leading to an alis SRA may not know.");
1419 if (abnormal_edge_after_stmt_p (stmt
, oe_check
))
1421 disqualify_base_of_expr (base
, "May lead to need to add statements "
1422 "to abnormal edge.");
1426 bool read
= build_access_from_expr (base
, stmt
, false);
1427 bool write
= build_access_from_expr (base
, stmt
, true);
1430 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1432 fprintf (dump_file
, "Allowed ADDR_EXPR of ");
1433 print_generic_expr (dump_file
, base
);
1434 fprintf (dump_file
, " because of ");
1435 print_gimple_stmt (dump_file
, stmt
, 0);
1436 fprintf (dump_file
, "\n");
1438 bitmap_set_bit (passed_by_ref_in_call
, DECL_UID (base
));
1445 return build_access_from_expr (expr
, stmt
, false);
1449 /* Return the single non-EH successor edge of BB or NULL if there is none or
1453 single_non_eh_succ (basic_block bb
)
1458 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1459 if (!(e
->flags
& EDGE_EH
))
1469 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1470 there is no alternative spot where to put statements SRA might need to
1471 generate after it. The spot we are looking for is an edge leading to a
1472 single non-EH successor, if it exists and is indeed single. RHS may be
1473 NULL, in that case ignore it. */
1476 disqualify_if_bad_bb_terminating_stmt (gimple
*stmt
, tree lhs
, tree rhs
)
1478 if (stmt_ends_bb_p (stmt
))
1480 if (single_non_eh_succ (gimple_bb (stmt
)))
1483 disqualify_base_of_expr (lhs
, "LHS of a throwing stmt.");
1485 disqualify_base_of_expr (rhs
, "RHS of a throwing stmt.");
1491 /* Return true if the nature of BASE is such that it contains data even if
1492 there is no write to it in the function. */
1495 comes_initialized_p (tree base
)
1497 return TREE_CODE (base
) == PARM_DECL
|| constant_decl_p (base
);
1500 /* Scan expressions occurring in STMT, create access structures for all accesses
1501 to candidates for scalarization and remove those candidates which occur in
1502 statements or expressions that prevent them from being split apart. Return
1503 true if any access has been inserted. */
1506 build_accesses_from_assign (gimple
*stmt
)
1509 struct access
*lacc
, *racc
;
1511 if (!gimple_assign_single_p (stmt
)
1512 /* Scope clobbers don't influence scalarization. */
1513 || gimple_clobber_p (stmt
))
1516 lhs
= gimple_assign_lhs (stmt
);
1517 rhs
= gimple_assign_rhs1 (stmt
);
1519 if (disqualify_if_bad_bb_terminating_stmt (stmt
, lhs
, rhs
))
1522 racc
= build_access_from_expr_1 (rhs
, stmt
, false);
1523 lacc
= build_access_from_expr_1 (lhs
, stmt
, true);
1527 lacc
->grp_assignment_write
= 1;
1528 if (storage_order_barrier_p (rhs
))
1529 lacc
->grp_unscalarizable_region
= 1;
1531 if (should_scalarize_away_bitmap
&& !is_gimple_reg_type (lacc
->type
))
1533 bool type_changing_p
= false;
1534 contains_vce_or_bfcref_p (lhs
, &type_changing_p
);
1535 if (type_changing_p
)
1536 bitmap_set_bit (cannot_scalarize_away_bitmap
,
1537 DECL_UID (lacc
->base
));
1543 racc
->grp_assignment_read
= 1;
1544 if (should_scalarize_away_bitmap
&& !is_gimple_reg_type (racc
->type
))
1546 bool type_changing_p
= false;
1547 contains_vce_or_bfcref_p (rhs
, &type_changing_p
);
1549 if (type_changing_p
|| gimple_has_volatile_ops (stmt
))
1550 bitmap_set_bit (cannot_scalarize_away_bitmap
,
1551 DECL_UID (racc
->base
));
1553 bitmap_set_bit (should_scalarize_away_bitmap
,
1554 DECL_UID (racc
->base
));
1556 if (storage_order_barrier_p (lhs
))
1557 racc
->grp_unscalarizable_region
= 1;
1561 && (sra_mode
== SRA_MODE_EARLY_INTRA
|| sra_mode
== SRA_MODE_INTRA
)
1562 && !lacc
->grp_unscalarizable_region
1563 && !racc
->grp_unscalarizable_region
1564 && AGGREGATE_TYPE_P (TREE_TYPE (lhs
))
1565 && lacc
->size
== racc
->size
1566 && useless_type_conversion_p (lacc
->type
, racc
->type
))
1568 struct assign_link
*link
;
1570 link
= assign_link_pool
.allocate ();
1571 memset (link
, 0, sizeof (struct assign_link
));
1575 add_link_to_rhs (racc
, link
);
1576 add_link_to_lhs (lacc
, link
);
1577 add_access_to_rhs_work_queue (racc
);
1578 add_access_to_lhs_work_queue (lacc
);
1580 /* Let's delay marking the areas as written until propagation of accesses
1581 across link, unless the nature of rhs tells us that its data comes
1583 if (!comes_initialized_p (racc
->base
))
1584 lacc
->write
= false;
1587 return lacc
|| racc
;
1590 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to detect taking
1591 addresses of candidates a places which are not call arguments. Such
1592 candidates are disqalified from SRA. This also applies to GIMPLE_ASM
1593 operands with memory constrains which cannot be scalarized. */
1596 scan_visit_addr (gimple
*, tree op
, tree
, void *)
1598 op
= get_base_address (op
);
1601 disqualify_candidate (op
, "Address taken in a non-call-argument context.");
1606 /* Scan function and look for interesting expressions and create access
1607 structures for them. Return true iff any access is created. */
1610 scan_function (void)
1615 FOR_EACH_BB_FN (bb
, cfun
)
1617 gimple_stmt_iterator gsi
;
1618 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1619 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), NULL
, NULL
, NULL
,
1622 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1624 gimple
*stmt
= gsi_stmt (gsi
);
1628 if (gimple_code (stmt
) != GIMPLE_CALL
)
1629 walk_stmt_load_store_addr_ops (stmt
, NULL
, NULL
, NULL
,
1632 switch (gimple_code (stmt
))
1635 t
= gimple_return_retval (as_a
<greturn
*> (stmt
));
1637 ret
|= build_access_from_expr (t
, stmt
, false);
1641 ret
|= build_accesses_from_assign (stmt
);
1646 enum out_edge_check oe_check
= SRA_OUTGOING_EDGES_UNCHECKED
;
1647 gcall
*call
= as_a
<gcall
*> (stmt
);
1648 for (i
= 0; i
< gimple_call_num_args (call
); i
++)
1650 bool can_be_returned
;
1651 if (gimple_call_lhs (call
))
1653 int af
= gimple_call_arg_flags (call
, i
);
1654 can_be_returned
= !(af
& EAF_NOT_RETURNED_DIRECTLY
);
1657 can_be_returned
= false;
1658 ret
|= build_access_from_call_arg (gimple_call_arg (call
,
1660 stmt
, can_be_returned
,
1663 if (gimple_call_chain(stmt
))
1664 ret
|= build_access_from_call_arg (gimple_call_chain(call
),
1665 stmt
, false, &oe_check
);
1668 t
= gimple_call_lhs (stmt
);
1669 if (t
&& !disqualify_if_bad_bb_terminating_stmt (stmt
, t
, NULL
))
1671 /* If the STMT is a call to DEFERRED_INIT, avoid setting
1672 cannot_scalarize_away_bitmap. */
1673 if (gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
1674 ret
|= !!build_access_from_expr_1 (t
, stmt
, true);
1676 ret
|= build_access_from_expr (t
, stmt
, true);
1682 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1683 if (stmt_ends_bb_p (asm_stmt
)
1684 && !single_succ_p (gimple_bb (asm_stmt
)))
1686 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); i
++)
1688 t
= TREE_VALUE (gimple_asm_input_op (asm_stmt
, i
));
1689 disqualify_base_of_expr (t
, "OP of asm goto.");
1691 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); i
++)
1693 t
= TREE_VALUE (gimple_asm_output_op (asm_stmt
, i
));
1694 disqualify_base_of_expr (t
, "OP of asm goto.");
1699 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); i
++)
1701 t
= TREE_VALUE (gimple_asm_input_op (asm_stmt
, i
));
1702 ret
|= build_access_from_expr (t
, asm_stmt
, false);
1704 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); i
++)
1706 t
= TREE_VALUE (gimple_asm_output_op (asm_stmt
, i
));
1707 ret
|= build_access_from_expr (t
, asm_stmt
, true);
1722 /* Helper of QSORT function. There are pointers to accesses in the array. An
1723 access is considered smaller than another if it has smaller offset or if the
1724 offsets are the same but is size is bigger. */
1727 compare_access_positions (const void *a
, const void *b
)
1729 const access_p
*fp1
= (const access_p
*) a
;
1730 const access_p
*fp2
= (const access_p
*) b
;
1731 const access_p f1
= *fp1
;
1732 const access_p f2
= *fp2
;
1734 if (f1
->offset
!= f2
->offset
)
1735 return f1
->offset
< f2
->offset
? -1 : 1;
1737 if (f1
->size
== f2
->size
)
1739 if (f1
->type
== f2
->type
)
1741 /* Put any non-aggregate type before any aggregate type. */
1742 else if (!is_gimple_reg_type (f1
->type
)
1743 && is_gimple_reg_type (f2
->type
))
1745 else if (is_gimple_reg_type (f1
->type
)
1746 && !is_gimple_reg_type (f2
->type
))
1748 /* Put any complex or vector type before any other scalar type. */
1749 else if (TREE_CODE (f1
->type
) != COMPLEX_TYPE
1750 && TREE_CODE (f1
->type
) != VECTOR_TYPE
1751 && (TREE_CODE (f2
->type
) == COMPLEX_TYPE
1752 || VECTOR_TYPE_P (f2
->type
)))
1754 else if ((TREE_CODE (f1
->type
) == COMPLEX_TYPE
1755 || VECTOR_TYPE_P (f1
->type
))
1756 && TREE_CODE (f2
->type
) != COMPLEX_TYPE
1757 && TREE_CODE (f2
->type
) != VECTOR_TYPE
)
1759 /* Put any integral type before any non-integral type. When splicing, we
1760 make sure that those with insufficient precision and occupying the
1761 same space are not scalarized. */
1762 else if (INTEGRAL_TYPE_P (f1
->type
)
1763 && !INTEGRAL_TYPE_P (f2
->type
))
1765 else if (!INTEGRAL_TYPE_P (f1
->type
)
1766 && INTEGRAL_TYPE_P (f2
->type
))
1768 /* Put the integral type with the bigger precision first. */
1769 else if (INTEGRAL_TYPE_P (f1
->type
)
1770 && INTEGRAL_TYPE_P (f2
->type
)
1771 && (TYPE_PRECISION (f2
->type
) != TYPE_PRECISION (f1
->type
)))
1772 return TYPE_PRECISION (f2
->type
) - TYPE_PRECISION (f1
->type
);
1773 /* Stabilize the sort. */
1774 return TYPE_UID (f1
->type
) - TYPE_UID (f2
->type
);
1777 /* We want the bigger accesses first, thus the opposite operator in the next
1779 return f1
->size
> f2
->size
? -1 : 1;
1783 /* Append a name of the declaration to the name obstack. A helper function for
1787 make_fancy_decl_name (tree decl
)
1791 tree name
= DECL_NAME (decl
);
1793 obstack_grow (&name_obstack
, IDENTIFIER_POINTER (name
),
1794 IDENTIFIER_LENGTH (name
));
1797 sprintf (buffer
, "D%u", DECL_UID (decl
));
1798 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1802 /* Helper for make_fancy_name. */
1805 make_fancy_name_1 (tree expr
)
1812 make_fancy_decl_name (expr
);
1816 switch (TREE_CODE (expr
))
1819 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1820 obstack_1grow (&name_obstack
, '$');
1821 make_fancy_decl_name (TREE_OPERAND (expr
, 1));
1825 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1826 obstack_1grow (&name_obstack
, '$');
1827 /* Arrays with only one element may not have a constant as their
1829 index
= TREE_OPERAND (expr
, 1);
1830 if (TREE_CODE (index
) != INTEGER_CST
)
1832 sprintf (buffer
, HOST_WIDE_INT_PRINT_DEC
, TREE_INT_CST_LOW (index
));
1833 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1838 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1842 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1843 if (!integer_zerop (TREE_OPERAND (expr
, 1)))
1845 obstack_1grow (&name_obstack
, '$');
1846 sprintf (buffer
, HOST_WIDE_INT_PRINT_DEC
,
1847 TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)));
1848 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1854 gcc_unreachable (); /* we treat these as scalars. */
1861 /* Create a human readable name for replacement variable of ACCESS. */
1864 make_fancy_name (tree expr
)
1866 make_fancy_name_1 (expr
);
1867 obstack_1grow (&name_obstack
, '\0');
1868 return XOBFINISH (&name_obstack
, char *);
1871 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1872 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1873 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1874 be non-NULL and is used to insert new statements either before or below
1875 the current one as specified by INSERT_AFTER. This function is not capable
1876 of handling bitfields. */
1879 build_ref_for_offset (location_t loc
, tree base
, poly_int64 offset
,
1880 bool reverse
, tree exp_type
, gimple_stmt_iterator
*gsi
,
1883 tree prev_base
= base
;
1886 poly_int64 base_offset
;
1887 unsigned HOST_WIDE_INT misalign
;
1890 /* Preserve address-space information. */
1891 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (base
));
1892 if (as
!= TYPE_ADDR_SPACE (exp_type
))
1893 exp_type
= build_qualified_type (exp_type
,
1894 TYPE_QUALS (exp_type
)
1895 | ENCODE_QUAL_ADDR_SPACE (as
));
1897 poly_int64 byte_offset
= exact_div (offset
, BITS_PER_UNIT
);
1898 get_object_alignment_1 (base
, &align
, &misalign
);
1899 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
1901 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1902 offset such as array[var_index]. */
1908 gcc_checking_assert (gsi
);
1909 tmp
= make_ssa_name (build_pointer_type (TREE_TYPE (prev_base
)));
1910 addr
= build_fold_addr_expr (unshare_expr (prev_base
));
1911 STRIP_USELESS_TYPE_CONVERSION (addr
);
1912 stmt
= gimple_build_assign (tmp
, addr
);
1913 gimple_set_location (stmt
, loc
);
1915 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
1917 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1919 off
= build_int_cst (reference_alias_ptr_type (prev_base
), byte_offset
);
1922 else if (TREE_CODE (base
) == MEM_REF
)
1924 off
= build_int_cst (TREE_TYPE (TREE_OPERAND (base
, 1)),
1925 base_offset
+ byte_offset
);
1926 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1), off
);
1927 base
= unshare_expr (TREE_OPERAND (base
, 0));
1931 off
= build_int_cst (reference_alias_ptr_type (prev_base
),
1932 base_offset
+ byte_offset
);
1933 base
= build_fold_addr_expr (unshare_expr (base
));
1936 unsigned int align_bound
= known_alignment (misalign
+ offset
);
1937 if (align_bound
!= 0)
1938 align
= MIN (align
, align_bound
);
1939 if (align
!= TYPE_ALIGN (exp_type
))
1940 exp_type
= build_aligned_type (exp_type
, align
);
1942 mem_ref
= fold_build2_loc (loc
, MEM_REF
, exp_type
, base
, off
);
1943 REF_REVERSE_STORAGE_ORDER (mem_ref
) = reverse
;
1944 if (TREE_THIS_VOLATILE (prev_base
))
1945 TREE_THIS_VOLATILE (mem_ref
) = 1;
1946 if (TREE_SIDE_EFFECTS (prev_base
))
1947 TREE_SIDE_EFFECTS (mem_ref
) = 1;
1951 /* Construct and return a memory reference that is equal to a portion of
1952 MODEL->expr but is based on BASE. If this cannot be done, return NULL. */
1955 build_reconstructed_reference (location_t
, tree base
, struct access
*model
)
1957 tree expr
= model
->expr
;
1958 /* We have to make sure to start just below the outermost union. */
1959 tree start_expr
= expr
;
1960 while (handled_component_p (expr
))
1962 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr
, 0))) == UNION_TYPE
)
1964 expr
= TREE_OPERAND (expr
, 0);
1968 tree prev_expr
= NULL_TREE
;
1969 while (!types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (base
)))
1971 if (!handled_component_p (expr
))
1974 expr
= TREE_OPERAND (expr
, 0);
1977 /* Guard against broken VIEW_CONVERT_EXPRs... */
1981 TREE_OPERAND (prev_expr
, 0) = base
;
1982 tree ref
= unshare_expr (model
->expr
);
1983 TREE_OPERAND (prev_expr
, 0) = expr
;
1987 /* Construct a memory reference to a part of an aggregate BASE at the given
1988 OFFSET and of the same type as MODEL. In case this is a reference to a
1989 bit-field, the function will replicate the last component_ref of model's
1990 expr to access it. INSERT_AFTER and GSI have the same meaning as in
1991 build_ref_for_offset, furthermore, when GSI is NULL, the function expects
1992 that it re-builds the entire reference from a DECL to the final access and
1993 so will create a MEM_REF when OFFSET does not exactly match offset of
1997 build_ref_for_model (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1998 struct access
*model
, gimple_stmt_iterator
*gsi
,
2001 gcc_assert (offset
>= 0);
2002 if (TREE_CODE (model
->expr
) == COMPONENT_REF
2003 && DECL_BIT_FIELD (TREE_OPERAND (model
->expr
, 1)))
2005 /* This access represents a bit-field. */
2006 tree t
, exp_type
, fld
= TREE_OPERAND (model
->expr
, 1);
2008 offset
-= int_bit_position (fld
);
2009 exp_type
= TREE_TYPE (TREE_OPERAND (model
->expr
, 0));
2010 t
= build_ref_for_offset (loc
, base
, offset
, model
->reverse
, exp_type
,
2012 /* The flag will be set on the record type. */
2013 REF_REVERSE_STORAGE_ORDER (t
) = 0;
2014 return fold_build3_loc (loc
, COMPONENT_REF
, TREE_TYPE (fld
), t
, fld
,
2020 if (model
->grp_same_access_path
2021 && !TREE_THIS_VOLATILE (base
)
2022 && (TYPE_ADDR_SPACE (TREE_TYPE (base
))
2023 == TYPE_ADDR_SPACE (TREE_TYPE (model
->expr
)))
2024 && (offset
== model
->offset
2025 || (gsi
&& offset
<= model
->offset
))
2026 /* build_reconstructed_reference can still fail if we have already
2027 massaged BASE because of another type incompatibility. */
2028 && (res
= build_reconstructed_reference (loc
, base
, model
)))
2031 return build_ref_for_offset (loc
, base
, offset
, model
->reverse
,
2032 model
->type
, gsi
, insert_after
);
2036 /* Attempt to build a memory reference that we could but into a gimple
2037 debug_bind statement. Similar to build_ref_for_model but punts if it has to
2038 create statements and return s NULL instead. This function also ignores
2039 alignment issues and so its results should never end up in non-debug
2043 build_debug_ref_for_model (location_t loc
, tree base
, HOST_WIDE_INT offset
,
2044 struct access
*model
)
2046 poly_int64 base_offset
;
2049 if (TREE_CODE (model
->expr
) == COMPONENT_REF
2050 && DECL_BIT_FIELD (TREE_OPERAND (model
->expr
, 1)))
2053 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
2056 if (TREE_CODE (base
) == MEM_REF
)
2058 off
= build_int_cst (TREE_TYPE (TREE_OPERAND (base
, 1)),
2059 base_offset
+ offset
/ BITS_PER_UNIT
);
2060 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1), off
);
2061 base
= unshare_expr (TREE_OPERAND (base
, 0));
2065 off
= build_int_cst (reference_alias_ptr_type (base
),
2066 base_offset
+ offset
/ BITS_PER_UNIT
);
2067 base
= build_fold_addr_expr (unshare_expr (base
));
2070 return fold_build2_loc (loc
, MEM_REF
, model
->type
, base
, off
);
2073 /* Construct a memory reference consisting of component_refs and array_refs to
2074 a part of an aggregate *RES (which is of type TYPE). The requested part
2075 should have type EXP_TYPE at be the given OFFSET. This function might not
2076 succeed, it returns true when it does and only then *RES points to something
2077 meaningful. This function should be used only to build expressions that we
2078 might need to present to user (e.g. in warnings). In all other situations,
2079 build_ref_for_model or build_ref_for_offset should be used instead. */
2082 build_user_friendly_ref_for_offset (tree
*res
, tree type
, HOST_WIDE_INT offset
,
2088 tree tr_size
, index
, minidx
;
2089 HOST_WIDE_INT el_size
;
2091 if (offset
== 0 && exp_type
2092 && types_compatible_p (exp_type
, type
))
2095 switch (TREE_CODE (type
))
2098 case QUAL_UNION_TYPE
:
2100 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
2102 HOST_WIDE_INT pos
, size
;
2103 tree tr_pos
, expr
, *expr_ptr
;
2105 if (TREE_CODE (fld
) != FIELD_DECL
)
2108 tr_pos
= bit_position (fld
);
2109 if (!tr_pos
|| !tree_fits_uhwi_p (tr_pos
))
2111 pos
= tree_to_uhwi (tr_pos
);
2112 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
|| pos
== 0);
2113 tr_size
= DECL_SIZE (fld
);
2114 if (!tr_size
|| !tree_fits_uhwi_p (tr_size
))
2116 size
= tree_to_uhwi (tr_size
);
2122 else if (pos
> offset
|| (pos
+ size
) <= offset
)
2125 expr
= build3 (COMPONENT_REF
, TREE_TYPE (fld
), *res
, fld
,
2128 if (build_user_friendly_ref_for_offset (expr_ptr
, TREE_TYPE (fld
),
2129 offset
- pos
, exp_type
))
2138 tr_size
= TYPE_SIZE (TREE_TYPE (type
));
2139 if (!tr_size
|| !tree_fits_uhwi_p (tr_size
))
2141 el_size
= tree_to_uhwi (tr_size
);
2143 minidx
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
2144 if (TREE_CODE (minidx
) != INTEGER_CST
|| el_size
== 0)
2146 index
= build_int_cst (TYPE_DOMAIN (type
), offset
/ el_size
);
2147 if (!integer_zerop (minidx
))
2148 index
= int_const_binop (PLUS_EXPR
, index
, minidx
);
2149 *res
= build4 (ARRAY_REF
, TREE_TYPE (type
), *res
, index
,
2150 NULL_TREE
, NULL_TREE
);
2151 offset
= offset
% el_size
;
2152 type
= TREE_TYPE (type
);
2167 /* Print message to dump file why a variable was rejected. */
2170 reject (tree var
, const char *msg
)
2172 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2174 fprintf (dump_file
, "Rejected (%d): %s: ", DECL_UID (var
), msg
);
2175 print_generic_expr (dump_file
, var
);
2176 fprintf (dump_file
, "\n");
2180 /* Return true if VAR is a candidate for SRA. */
2183 maybe_add_sra_candidate (tree var
)
2185 tree type
= TREE_TYPE (var
);
2189 if (!AGGREGATE_TYPE_P (type
))
2191 reject (var
, "not aggregate");
2195 if ((is_global_var (var
)
2196 /* There are cases where non-addressable variables fail the
2197 pt_solutions_check test, e.g in gcc.dg/uninit-40.c. */
2198 || (TREE_ADDRESSABLE (var
)
2199 && pt_solution_includes (&cfun
->gimple_df
->escaped_return
, var
))
2200 || (TREE_CODE (var
) == RESULT_DECL
2201 && !DECL_BY_REFERENCE (var
)
2202 && aggregate_value_p (var
, current_function_decl
)))
2203 /* Allow constant-pool entries that "need to live in memory". */
2204 && !constant_decl_p (var
))
2206 reject (var
, "needs to live in memory and escapes or global");
2209 if (TREE_THIS_VOLATILE (var
))
2211 reject (var
, "is volatile");
2214 if (!COMPLETE_TYPE_P (type
))
2216 reject (var
, "has incomplete type");
2219 if (!tree_fits_shwi_p (TYPE_SIZE (type
)))
2221 reject (var
, "type size not fixed");
2224 if (tree_to_shwi (TYPE_SIZE (type
)) == 0)
2226 reject (var
, "type size is zero");
2229 if (type_internals_preclude_sra_p (type
, &msg
))
2234 if (/* Fix for PR 41089. tree-stdarg.cc needs to have va_lists intact but
2235 we also want to schedule it rather late. Thus we ignore it in
2237 (sra_mode
== SRA_MODE_EARLY_INTRA
2238 && is_va_list_type (type
)))
2240 reject (var
, "is va_list");
2244 bitmap_set_bit (candidate_bitmap
, DECL_UID (var
));
2245 slot
= candidates
->find_slot_with_hash (var
, DECL_UID (var
), INSERT
);
2248 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2250 fprintf (dump_file
, "Candidate (%d): ", DECL_UID (var
));
2251 print_generic_expr (dump_file
, var
);
2252 fprintf (dump_file
, "\n");
2258 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
2259 those with type which is suitable for scalarization. */
2262 find_var_candidates (void)
2268 for (parm
= DECL_ARGUMENTS (current_function_decl
);
2270 parm
= DECL_CHAIN (parm
))
2271 ret
|= maybe_add_sra_candidate (parm
);
2273 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2278 ret
|= maybe_add_sra_candidate (var
);
2284 /* Return true if EXP is a reference chain of COMPONENT_REFs and AREAY_REFs
2285 ending either with a DECL or a MEM_REF with zero offset. */
2288 path_comparable_for_same_access (tree expr
)
2290 while (handled_component_p (expr
))
2292 if (TREE_CODE (expr
) == ARRAY_REF
)
2294 /* SSA name indices can occur here too when the array is of sie one.
2295 But we cannot just re-use array_refs with SSA names elsewhere in
2296 the function, so disallow non-constant indices. TODO: Remove this
2297 limitation after teaching build_reconstructed_reference to replace
2298 the index with the index type lower bound. */
2299 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
)
2302 expr
= TREE_OPERAND (expr
, 0);
2305 if (TREE_CODE (expr
) == MEM_REF
)
2307 if (!zerop (TREE_OPERAND (expr
, 1)))
2311 gcc_assert (DECL_P (expr
));
2316 /* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
2317 true if the chain of these handled components are exactly the same as EXP2
2318 and the expression under them is the same DECL or an equivalent MEM_REF.
2319 The reference picked by compare_access_positions must go to EXP1. */
2322 same_access_path_p (tree exp1
, tree exp2
)
2324 if (TREE_CODE (exp1
) != TREE_CODE (exp2
))
2326 /* Special case single-field structures loaded sometimes as the field
2327 and sometimes as the structure. If the field is of a scalar type,
2328 compare_access_positions will put it into exp1.
2330 TODO: The gimple register type condition can be removed if teach
2331 compare_access_positions to put inner types first. */
2332 if (is_gimple_reg_type (TREE_TYPE (exp1
))
2333 && TREE_CODE (exp1
) == COMPONENT_REF
2334 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1
, 0)))
2335 == TYPE_MAIN_VARIANT (TREE_TYPE (exp2
))))
2336 exp1
= TREE_OPERAND (exp1
, 0);
2341 if (!operand_equal_p (exp1
, exp2
, OEP_ADDRESS_OF
))
2347 /* Return true when either T1 is a type that, when loaded into a register and
2348 stored back to memory will yield the same bits or when both T1 and T2 are
2352 types_risk_mangled_binary_repr_p (tree t1
, tree t2
)
2354 if (mode_can_transfer_bits (TYPE_MODE (t1
)))
2357 return !types_compatible_p (t1
, t2
);
2360 /* Sort all accesses for the given variable, check for partial overlaps and
2361 return NULL if there are any. If there are none, pick a representative for
2362 each combination of offset and size and create a linked list out of them.
2363 Return the pointer to the first representative and make sure it is the first
2364 one in the vector of accesses. */
2366 static struct access
*
2367 sort_and_splice_var_accesses (tree var
)
2369 int i
, j
, access_count
;
2370 struct access
*res
, **prev_acc_ptr
= &res
;
2371 vec
<access_p
> *access_vec
;
2373 HOST_WIDE_INT low
= -1, high
= 0;
2375 access_vec
= get_base_access_vector (var
);
2378 access_count
= access_vec
->length ();
2380 /* Sort by <OFFSET, SIZE>. */
2381 access_vec
->qsort (compare_access_positions
);
2384 while (i
< access_count
)
2386 struct access
*access
= (*access_vec
)[i
];
2387 bool grp_write
= access
->write
;
2388 bool grp_read
= !access
->write
;
2389 bool grp_scalar_write
= access
->write
2390 && is_gimple_reg_type (access
->type
);
2391 bool grp_scalar_read
= !access
->write
2392 && is_gimple_reg_type (access
->type
);
2393 bool grp_assignment_read
= access
->grp_assignment_read
;
2394 bool grp_assignment_write
= access
->grp_assignment_write
;
2395 bool multiple_scalar_reads
= false;
2396 bool grp_partial_lhs
= access
->grp_partial_lhs
;
2397 bool first_scalar
= is_gimple_reg_type (access
->type
);
2398 bool unscalarizable_region
= access
->grp_unscalarizable_region
;
2399 bool grp_same_access_path
= true;
2400 bool bf_non_full_precision
2401 = (INTEGRAL_TYPE_P (access
->type
)
2402 && TYPE_PRECISION (access
->type
) != access
->size
2403 && TREE_CODE (access
->expr
) == COMPONENT_REF
2404 && DECL_BIT_FIELD (TREE_OPERAND (access
->expr
, 1)));
2406 if (first
|| access
->offset
>= high
)
2409 low
= access
->offset
;
2410 high
= access
->offset
+ access
->size
;
2412 else if (access
->offset
> low
&& access
->offset
+ access
->size
> high
)
2415 gcc_assert (access
->offset
>= low
2416 && access
->offset
+ access
->size
<= high
);
2418 if (INTEGRAL_TYPE_P (access
->type
)
2419 && TYPE_PRECISION (access
->type
) != access
->size
2420 && bitmap_bit_p (passed_by_ref_in_call
, DECL_UID (access
->base
)))
2422 /* This can lead to performance regressions because we can generate
2423 excessive zero extensions. */
2424 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2426 fprintf (dump_file
, "Won't scalarize ");
2427 print_generic_expr (dump_file
, access
->base
);
2428 fprintf (dump_file
, "(%d), it is passed by reference to a call "
2429 "and there are accesses with precision not covering "
2430 "their type size.", DECL_UID (access
->base
));
2435 grp_same_access_path
= path_comparable_for_same_access (access
->expr
);
2438 while (j
< access_count
)
2440 struct access
*ac2
= (*access_vec
)[j
];
2441 if (ac2
->offset
!= access
->offset
|| ac2
->size
!= access
->size
)
2446 grp_scalar_write
= (grp_scalar_write
2447 || is_gimple_reg_type (ac2
->type
));
2452 if (is_gimple_reg_type (ac2
->type
))
2454 if (grp_scalar_read
)
2455 multiple_scalar_reads
= true;
2457 grp_scalar_read
= true;
2460 grp_assignment_read
|= ac2
->grp_assignment_read
;
2461 grp_assignment_write
|= ac2
->grp_assignment_write
;
2462 grp_partial_lhs
|= ac2
->grp_partial_lhs
;
2463 unscalarizable_region
|= ac2
->grp_unscalarizable_region
;
2464 relink_to_new_repr (access
, ac2
);
2466 /* If there are both aggregate-type and scalar-type accesses with
2467 this combination of size and offset, the comparison function
2468 should have put the scalars first. */
2469 gcc_assert (first_scalar
|| !is_gimple_reg_type (ac2
->type
));
2470 /* It also prefers integral types to non-integral. However, when the
2471 precision of the selected type does not span the entire area and
2472 should also be used for a non-integer (i.e. float), we must not
2473 let that happen. Normally analyze_access_subtree expands the type
2474 to cover the entire area but for bit-fields it doesn't. */
2475 if (bf_non_full_precision
&& !INTEGRAL_TYPE_P (ac2
->type
))
2477 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2479 fprintf (dump_file
, "Cannot scalarize the following access "
2480 "because insufficient precision integer type was "
2482 dump_access (dump_file
, access
, false);
2484 unscalarizable_region
= true;
2486 else if (types_risk_mangled_binary_repr_p (access
->type
, ac2
->type
))
2488 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2490 fprintf (dump_file
, "Cannot scalarize the following access "
2491 "because data would be held in a mode which is not "
2492 "guaranteed to preserve all bits.\n ");
2493 dump_access (dump_file
, access
, false);
2495 unscalarizable_region
= true;
2498 if (grp_same_access_path
2499 && !same_access_path_p (access
->expr
, ac2
->expr
))
2500 grp_same_access_path
= false;
2502 ac2
->group_representative
= access
;
2508 access
->group_representative
= access
;
2509 access
->grp_write
= grp_write
;
2510 access
->grp_read
= grp_read
;
2511 access
->grp_scalar_read
= grp_scalar_read
;
2512 access
->grp_scalar_write
= grp_scalar_write
;
2513 access
->grp_assignment_read
= grp_assignment_read
;
2514 access
->grp_assignment_write
= grp_assignment_write
;
2515 access
->grp_hint
= multiple_scalar_reads
&& !constant_decl_p (var
);
2516 access
->grp_partial_lhs
= grp_partial_lhs
;
2517 access
->grp_unscalarizable_region
= unscalarizable_region
;
2518 access
->grp_same_access_path
= grp_same_access_path
;
2520 *prev_acc_ptr
= access
;
2521 prev_acc_ptr
= &access
->next_grp
;
2524 gcc_assert (res
== (*access_vec
)[0]);
2528 /* Create a variable for the given ACCESS which determines the type, name and a
2529 few other properties. Return the variable declaration and store it also to
2530 ACCESS->replacement. REG_TREE is used when creating a declaration to base a
2531 default-definition SSA name on in order to facilitate an uninitialized
2532 warning. It is used instead of the actual ACCESS type if that is not of a
2533 gimple register type. */
2536 create_access_replacement (struct access
*access
, tree reg_type
= NULL_TREE
)
2540 tree type
= access
->type
;
2541 if (reg_type
&& !is_gimple_reg_type (type
))
2544 if (access
->grp_to_be_debug_replaced
)
2546 repl
= create_tmp_var_raw (access
->type
);
2547 DECL_CONTEXT (repl
) = current_function_decl
;
2550 /* Drop any special alignment on the type if it's not on the main
2551 variant. This avoids issues with weirdo ABIs like AAPCS. */
2552 repl
= create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type
),
2553 TYPE_QUALS (type
)), "SR");
2554 if (access
->grp_partial_lhs
2555 && is_gimple_reg_type (type
))
2556 DECL_NOT_GIMPLE_REG_P (repl
) = 1;
2558 DECL_SOURCE_LOCATION (repl
) = DECL_SOURCE_LOCATION (access
->base
);
2559 DECL_ARTIFICIAL (repl
) = 1;
2560 DECL_IGNORED_P (repl
) = DECL_IGNORED_P (access
->base
);
2562 if (DECL_NAME (access
->base
)
2563 && !DECL_IGNORED_P (access
->base
)
2564 && !DECL_ARTIFICIAL (access
->base
))
2566 char *pretty_name
= make_fancy_name (access
->expr
);
2567 tree debug_expr
= unshare_expr_without_location (access
->expr
), d
;
2570 DECL_NAME (repl
) = get_identifier (pretty_name
);
2571 DECL_NAMELESS (repl
) = 1;
2572 obstack_free (&name_obstack
, pretty_name
);
2574 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2575 as DECL_DEBUG_EXPR isn't considered when looking for still
2576 used SSA_NAMEs and thus they could be freed. All debug info
2577 generation cares is whether something is constant or variable
2578 and that get_ref_base_and_extent works properly on the
2579 expression. It cannot handle accesses at a non-constant offset
2580 though, so just give up in those cases. */
2581 for (d
= debug_expr
;
2582 !fail
&& (handled_component_p (d
) || TREE_CODE (d
) == MEM_REF
);
2583 d
= TREE_OPERAND (d
, 0))
2584 switch (TREE_CODE (d
))
2587 case ARRAY_RANGE_REF
:
2588 if (TREE_OPERAND (d
, 1)
2589 && TREE_CODE (TREE_OPERAND (d
, 1)) != INTEGER_CST
)
2591 if (TREE_OPERAND (d
, 3)
2592 && TREE_CODE (TREE_OPERAND (d
, 3)) != INTEGER_CST
)
2596 if (TREE_OPERAND (d
, 2)
2597 && TREE_CODE (TREE_OPERAND (d
, 2)) != INTEGER_CST
)
2601 if (TREE_CODE (TREE_OPERAND (d
, 0)) != ADDR_EXPR
)
2604 d
= TREE_OPERAND (d
, 0);
2611 SET_DECL_DEBUG_EXPR (repl
, debug_expr
);
2612 DECL_HAS_DEBUG_EXPR_P (repl
) = 1;
2614 if (access
->grp_no_warning
)
2615 suppress_warning (repl
/* Be more selective! */);
2617 copy_warning (repl
, access
->base
);
2620 suppress_warning (repl
/* Be more selective! */);
2624 if (access
->grp_to_be_debug_replaced
)
2626 fprintf (dump_file
, "Created a debug-only replacement for ");
2627 print_generic_expr (dump_file
, access
->base
);
2628 fprintf (dump_file
, " offset: %u, size: %u\n",
2629 (unsigned) access
->offset
, (unsigned) access
->size
);
2633 fprintf (dump_file
, "Created a replacement for ");
2634 print_generic_expr (dump_file
, access
->base
);
2635 fprintf (dump_file
, " offset: %u, size: %u: ",
2636 (unsigned) access
->offset
, (unsigned) access
->size
);
2637 print_generic_expr (dump_file
, repl
, TDF_UID
);
2638 fprintf (dump_file
, "\n");
2641 sra_stats
.replacements
++;
2646 /* Return ACCESS scalar replacement, which must exist. */
2649 get_access_replacement (struct access
*access
)
2651 gcc_checking_assert (access
->replacement_decl
);
2652 return access
->replacement_decl
;
2656 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2657 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2658 to it is not "within" the root. Return false iff some accesses partially
2662 build_access_subtree (struct access
**access
)
2664 struct access
*root
= *access
, *last_child
= NULL
;
2665 HOST_WIDE_INT limit
= root
->offset
+ root
->size
;
2667 *access
= (*access
)->next_grp
;
2668 while (*access
&& (*access
)->offset
+ (*access
)->size
<= limit
)
2671 root
->first_child
= *access
;
2673 last_child
->next_sibling
= *access
;
2674 last_child
= *access
;
2675 (*access
)->parent
= root
;
2676 (*access
)->grp_write
|= root
->grp_write
;
2678 if (!build_access_subtree (access
))
2682 if (*access
&& (*access
)->offset
< limit
)
2688 /* Build a tree of access representatives, ACCESS is the pointer to the first
2689 one, others are linked in a list by the next_grp field. Return false iff
2690 some accesses partially overlap. */
2693 build_access_trees (struct access
*access
)
2697 struct access
*root
= access
;
2699 if (!build_access_subtree (&access
))
2701 root
->next_grp
= access
;
2706 /* Traverse the access forest where ROOT is the first root and verify that
2707 various important invariants hold true. */
2710 verify_sra_access_forest (struct access
*root
)
2712 struct access
*access
= root
;
2713 tree first_base
= root
->base
;
2714 gcc_assert (DECL_P (first_base
));
2717 gcc_assert (access
->base
== first_base
);
2719 gcc_assert (access
->offset
>= access
->parent
->offset
2720 && access
->size
<= access
->parent
->size
);
2721 if (access
->next_sibling
)
2722 gcc_assert (access
->next_sibling
->offset
2723 >= access
->offset
+ access
->size
);
2725 poly_int64 poffset
, psize
, pmax_size
;
2727 tree base
= get_ref_base_and_extent (access
->expr
, &poffset
, &psize
,
2728 &pmax_size
, &reverse
);
2729 HOST_WIDE_INT offset
, size
, max_size
;
2730 if (!poffset
.is_constant (&offset
)
2731 || !psize
.is_constant (&size
)
2732 || !pmax_size
.is_constant (&max_size
))
2734 gcc_assert (base
== first_base
);
2735 gcc_assert (offset
== access
->offset
);
2736 gcc_assert (access
->grp_unscalarizable_region
2737 || access
->grp_total_scalarization
2738 || size
== max_size
);
2739 gcc_assert (access
->grp_unscalarizable_region
2740 || !is_gimple_reg_type (access
->type
)
2741 || size
== access
->size
);
2742 gcc_assert (reverse
== access
->reverse
);
2744 if (access
->first_child
)
2746 gcc_assert (access
->first_child
->parent
== access
);
2747 access
= access
->first_child
;
2749 else if (access
->next_sibling
)
2751 gcc_assert (access
->next_sibling
->parent
== access
->parent
);
2752 access
= access
->next_sibling
;
2756 while (access
->parent
&& !access
->next_sibling
)
2757 access
= access
->parent
;
2758 if (access
->next_sibling
)
2759 access
= access
->next_sibling
;
2762 gcc_assert (access
== root
);
2763 root
= root
->next_grp
;
2771 /* Verify access forests of all candidates with accesses by calling
2772 verify_access_forest on each on them. */
2775 verify_all_sra_access_forests (void)
2779 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap
, 0, i
, bi
)
2781 tree var
= candidate (i
);
2782 struct access
*access
= get_first_repr_for_decl (var
);
2785 gcc_assert (access
->base
== var
);
2786 verify_sra_access_forest (access
);
2791 /* Return true if expr contains some ARRAY_REFs into a variable bounded
2795 expr_with_var_bounded_array_refs_p (tree expr
)
2797 while (handled_component_p (expr
))
2799 if (TREE_CODE (expr
) == ARRAY_REF
2800 && !tree_fits_shwi_p (array_ref_low_bound (expr
)))
2802 expr
= TREE_OPERAND (expr
, 0);
2807 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2808 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. If TOTALLY
2809 is set, we are totally scalarizing the aggregate. Also set all sorts of
2810 access flags appropriately along the way, notably always set grp_read and
2811 grp_assign_read according to MARK_READ and grp_write when MARK_WRITE is
2814 Creating a replacement for a scalar access is considered beneficial if its
2815 grp_hint ot TOTALLY is set (this means either that there is more than one
2816 direct read access or that we are attempting total scalarization) or
2817 according to the following table:
2819 Access written to through a scalar type (once or more times)
2821 | Written to in an assignment statement
2823 | | Access read as scalar _once_
2825 | | | Read in an assignment statement
2827 | | | | Scalarize Comment
2828 -----------------------------------------------------------------------------
2829 0 0 0 0 No access for the scalar
2830 0 0 0 1 No access for the scalar
2831 0 0 1 0 No Single read - won't help
2832 0 0 1 1 No The same case
2833 0 1 0 0 No access for the scalar
2834 0 1 0 1 No access for the scalar
2835 0 1 1 0 Yes s = *g; return s.i;
2836 0 1 1 1 Yes The same case as above
2837 1 0 0 0 No Won't help
2838 1 0 0 1 Yes s.i = 1; *g = s;
2839 1 0 1 0 Yes s.i = 5; g = s.i;
2840 1 0 1 1 Yes The same case as above
2841 1 1 0 0 No Won't help.
2842 1 1 0 1 Yes s.i = 1; *g = s;
2843 1 1 1 0 Yes s = *g; return s.i;
2844 1 1 1 1 Yes Any of the above yeses */
2847 analyze_access_subtree (struct access
*root
, struct access
*parent
,
2848 bool allow_replacements
, bool totally
)
2850 struct access
*child
;
2851 HOST_WIDE_INT limit
= root
->offset
+ root
->size
;
2852 HOST_WIDE_INT covered_to
= root
->offset
;
2853 bool scalar
= is_gimple_reg_type (root
->type
);
2854 bool hole
= false, sth_created
= false;
2858 if (parent
->grp_read
)
2860 if (parent
->grp_assignment_read
)
2861 root
->grp_assignment_read
= 1;
2862 if (parent
->grp_write
)
2863 root
->grp_write
= 1;
2864 if (parent
->grp_assignment_write
)
2865 root
->grp_assignment_write
= 1;
2866 if (!parent
->grp_same_access_path
)
2867 root
->grp_same_access_path
= 0;
2870 if (root
->grp_unscalarizable_region
)
2871 allow_replacements
= false;
2873 if (allow_replacements
&& expr_with_var_bounded_array_refs_p (root
->expr
))
2874 allow_replacements
= false;
2876 if (!totally
&& root
->grp_result_of_prop_from_lhs
)
2877 allow_replacements
= false;
2879 for (child
= root
->first_child
; child
; child
= child
->next_sibling
)
2881 hole
|= covered_to
< child
->offset
;
2882 sth_created
|= analyze_access_subtree (child
, root
,
2883 allow_replacements
&& !scalar
2884 && !root
->grp_partial_lhs
,
2887 root
->grp_unscalarized_data
|= child
->grp_unscalarized_data
;
2888 if (child
->grp_covered
)
2889 covered_to
+= child
->size
;
2894 if (allow_replacements
&& scalar
&& !root
->first_child
2895 && (totally
|| !root
->grp_total_scalarization
)
2898 || ((root
->grp_scalar_read
|| root
->grp_assignment_read
)
2899 && (root
->grp_scalar_write
|| root
->grp_assignment_write
))))
2901 /* Always create access replacements that cover the whole access.
2902 For integral types this means the precision has to match.
2903 Avoid assumptions based on the integral type kind, too. */
2904 if (INTEGRAL_TYPE_P (root
->type
)
2905 && ((TREE_CODE (root
->type
) != INTEGER_TYPE
2906 && TREE_CODE (root
->type
) != BITINT_TYPE
)
2907 || TYPE_PRECISION (root
->type
) != root
->size
)
2908 /* But leave bitfield accesses alone. */
2909 && (TREE_CODE (root
->expr
) != COMPONENT_REF
2910 || !DECL_BIT_FIELD (TREE_OPERAND (root
->expr
, 1))))
2912 tree rt
= root
->type
;
2913 gcc_assert ((root
->offset
% BITS_PER_UNIT
) == 0
2914 && (root
->size
% BITS_PER_UNIT
) == 0);
2915 if (TREE_CODE (root
->type
) == BITINT_TYPE
)
2916 root
->type
= build_bitint_type (root
->size
, TYPE_UNSIGNED (rt
));
2918 root
->type
= build_nonstandard_integer_type (root
->size
,
2919 TYPE_UNSIGNED (rt
));
2920 root
->expr
= build_ref_for_offset (UNKNOWN_LOCATION
, root
->base
,
2921 root
->offset
, root
->reverse
,
2922 root
->type
, NULL
, false);
2924 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2926 fprintf (dump_file
, "Changing the type of a replacement for ");
2927 print_generic_expr (dump_file
, root
->base
);
2928 fprintf (dump_file
, " offset: %u, size: %u ",
2929 (unsigned) root
->offset
, (unsigned) root
->size
);
2930 fprintf (dump_file
, " to an integer.\n");
2934 root
->grp_to_be_replaced
= 1;
2935 root
->replacement_decl
= create_access_replacement (root
);
2941 if (allow_replacements
2942 && scalar
&& !root
->first_child
2943 && !root
->grp_total_scalarization
2944 && (root
->grp_scalar_write
|| root
->grp_assignment_write
)
2945 && !bitmap_bit_p (cannot_scalarize_away_bitmap
,
2946 DECL_UID (root
->base
)))
2948 gcc_checking_assert (!root
->grp_scalar_read
2949 && !root
->grp_assignment_read
);
2951 if (MAY_HAVE_DEBUG_BIND_STMTS
)
2953 root
->grp_to_be_debug_replaced
= 1;
2954 root
->replacement_decl
= create_access_replacement (root
);
2958 if (covered_to
< limit
)
2960 if (scalar
|| !allow_replacements
)
2961 root
->grp_total_scalarization
= 0;
2964 if (!hole
|| totally
)
2965 root
->grp_covered
= 1;
2966 else if (root
->grp_write
|| comes_initialized_p (root
->base
))
2967 root
->grp_unscalarized_data
= 1; /* not covered and written to */
2971 /* Analyze all access trees linked by next_grp by the means of
2972 analyze_access_subtree. */
2974 analyze_access_trees (struct access
*access
)
2980 if (analyze_access_subtree (access
, NULL
, true,
2981 access
->grp_total_scalarization
))
2983 access
= access
->next_grp
;
2989 /* Return true iff a potential new child of ACC at offset OFFSET and with size
2990 SIZE would conflict with an already existing one. If exactly such a child
2991 already exists in ACC, store a pointer to it in EXACT_MATCH. */
2994 child_would_conflict_in_acc (struct access
*acc
, HOST_WIDE_INT norm_offset
,
2995 HOST_WIDE_INT size
, struct access
**exact_match
)
2997 struct access
*child
;
2999 for (child
= acc
->first_child
; child
; child
= child
->next_sibling
)
3001 if (child
->offset
== norm_offset
&& child
->size
== size
)
3003 *exact_match
= child
;
3007 if (child
->offset
< norm_offset
+ size
3008 && child
->offset
+ child
->size
> norm_offset
)
3015 /* Create a new child access of PARENT, with all properties just like MODEL
3016 except for its offset and with its grp_write false and grp_read true.
3017 Return the new access or NULL if it cannot be created. Note that this
3018 access is created long after all splicing and sorting, it's not located in
3019 any access vector and is automatically a representative of its group. Set
3020 the gpr_write flag of the new accesss if SET_GRP_WRITE is true. */
3022 static struct access
*
3023 create_artificial_child_access (struct access
*parent
, struct access
*model
,
3024 HOST_WIDE_INT new_offset
,
3025 bool set_grp_read
, bool set_grp_write
)
3027 struct access
**child
;
3028 tree expr
= parent
->base
;
3030 gcc_assert (!model
->grp_unscalarizable_region
);
3032 struct access
*access
= access_pool
.allocate ();
3033 memset (access
, 0, sizeof (struct access
));
3034 if (!build_user_friendly_ref_for_offset (&expr
, TREE_TYPE (expr
), new_offset
,
3037 access
->grp_no_warning
= true;
3038 expr
= build_ref_for_model (EXPR_LOCATION (parent
->base
), parent
->base
,
3039 new_offset
, model
, NULL
, false);
3042 access
->base
= parent
->base
;
3043 access
->expr
= expr
;
3044 access
->offset
= new_offset
;
3045 access
->size
= model
->size
;
3046 access
->type
= model
->type
;
3047 access
->parent
= parent
;
3048 access
->grp_read
= set_grp_read
;
3049 access
->grp_write
= set_grp_write
;
3050 access
->reverse
= model
->reverse
;
3052 child
= &parent
->first_child
;
3053 while (*child
&& (*child
)->offset
< new_offset
)
3054 child
= &(*child
)->next_sibling
;
3056 access
->next_sibling
= *child
;
3063 /* Beginning with ACCESS, traverse its whole access subtree and mark all
3064 sub-trees as written to. If any of them has not been marked so previously
3065 and has assignment links leading from it, re-enqueue it. */
3068 subtree_mark_written_and_rhs_enqueue (struct access
*access
)
3070 if (access
->grp_write
)
3072 access
->grp_write
= true;
3073 add_access_to_rhs_work_queue (access
);
3075 struct access
*child
;
3076 for (child
= access
->first_child
; child
; child
= child
->next_sibling
)
3077 subtree_mark_written_and_rhs_enqueue (child
);
3080 /* If there is still budget to create a propagation access for DECL, return
3081 true and decrement the budget. Otherwise return false. */
3084 budget_for_propagation_access (tree decl
)
3086 unsigned b
, *p
= propagation_budget
->get (decl
);
3090 b
= param_sra_max_propagations
;
3096 if (b
== 0 && dump_file
&& (dump_flags
& TDF_DETAILS
))
3098 fprintf (dump_file
, "The propagation budget of ");
3099 print_generic_expr (dump_file
, decl
);
3100 fprintf (dump_file
, " (UID: %u) has been exhausted.\n", DECL_UID (decl
));
3102 propagation_budget
->put (decl
, b
);
3106 /* Return true if ACC or any of its subaccesses has grp_child set. */
3109 access_or_its_child_written (struct access
*acc
)
3113 for (struct access
*sub
= acc
->first_child
; sub
; sub
= sub
->next_sibling
)
3114 if (access_or_its_child_written (sub
))
3119 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
3120 to LACC. Enqueue sub-accesses as necessary so that the write flag is
3121 propagated transitively. Return true if anything changed. Additionally, if
3122 RACC is a scalar access but LACC is not, change the type of the latter, if
3126 propagate_subaccesses_from_rhs (struct access
*lacc
, struct access
*racc
)
3128 struct access
*rchild
;
3129 HOST_WIDE_INT norm_delta
= lacc
->offset
- racc
->offset
;
3132 /* IF the LHS is still not marked as being written to, we only need to do so
3133 if the RHS at this level actually was. */
3134 if (!lacc
->grp_write
)
3136 gcc_checking_assert (!comes_initialized_p (racc
->base
));
3137 if (racc
->grp_write
)
3139 subtree_mark_written_and_rhs_enqueue (lacc
);
3144 if (is_gimple_reg_type (lacc
->type
)
3145 || lacc
->grp_unscalarizable_region
3146 || racc
->grp_unscalarizable_region
)
3148 if (!lacc
->grp_write
)
3151 subtree_mark_written_and_rhs_enqueue (lacc
);
3156 if (is_gimple_reg_type (racc
->type
))
3158 if (!lacc
->grp_write
)
3161 subtree_mark_written_and_rhs_enqueue (lacc
);
3163 if (!lacc
->first_child
3164 && !racc
->first_child
3165 && !types_risk_mangled_binary_repr_p (racc
->type
, lacc
->type
))
3167 /* We are about to change the access type from aggregate to scalar,
3168 so we need to put the reverse flag onto the access, if any. */
3170 = TYPE_REVERSE_STORAGE_ORDER (lacc
->type
)
3171 && !POINTER_TYPE_P (racc
->type
)
3172 && !VECTOR_TYPE_P (racc
->type
);
3173 tree t
= lacc
->base
;
3175 lacc
->type
= racc
->type
;
3176 if (build_user_friendly_ref_for_offset (&t
, TREE_TYPE (t
),
3177 lacc
->offset
, racc
->type
))
3180 lacc
->grp_same_access_path
= true;
3184 lacc
->expr
= build_ref_for_model (EXPR_LOCATION (lacc
->base
),
3185 lacc
->base
, lacc
->offset
,
3187 if (TREE_CODE (lacc
->expr
) == MEM_REF
)
3188 REF_REVERSE_STORAGE_ORDER (lacc
->expr
) = reverse
;
3189 lacc
->grp_no_warning
= true;
3190 lacc
->grp_same_access_path
= false;
3192 lacc
->reverse
= reverse
;
3197 for (rchild
= racc
->first_child
; rchild
; rchild
= rchild
->next_sibling
)
3199 struct access
*new_acc
= NULL
;
3200 HOST_WIDE_INT norm_offset
= rchild
->offset
+ norm_delta
;
3202 if (child_would_conflict_in_acc (lacc
, norm_offset
, rchild
->size
,
3207 if (!new_acc
->grp_write
&& rchild
->grp_write
)
3209 gcc_assert (!lacc
->grp_write
);
3210 subtree_mark_written_and_rhs_enqueue (new_acc
);
3214 rchild
->grp_hint
= 1;
3215 new_acc
->grp_hint
|= new_acc
->grp_read
;
3216 if (rchild
->first_child
3217 && propagate_subaccesses_from_rhs (new_acc
, rchild
))
3220 add_access_to_rhs_work_queue (new_acc
);
3225 if (!lacc
->grp_write
)
3228 subtree_mark_written_and_rhs_enqueue (lacc
);
3234 if (rchild
->grp_unscalarizable_region
3235 || !budget_for_propagation_access (lacc
->base
))
3237 if (!lacc
->grp_write
&& access_or_its_child_written (rchild
))
3240 subtree_mark_written_and_rhs_enqueue (lacc
);
3245 rchild
->grp_hint
= 1;
3246 /* Because get_ref_base_and_extent always includes padding in size for
3247 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
3248 type, we might be actually attempting to here to create a child of the
3249 same type as the parent. */
3250 if (!types_compatible_p (lacc
->type
, rchild
->type
))
3251 new_acc
= create_artificial_child_access (lacc
, rchild
, norm_offset
,
3254 || rchild
->grp_write
));
3257 gcc_checking_assert (new_acc
);
3258 if (racc
->first_child
)
3259 propagate_subaccesses_from_rhs (new_acc
, rchild
);
3261 add_access_to_rhs_work_queue (lacc
);
3268 /* Propagate subaccesses of LACC across an assignment link to RACC if they
3269 should inhibit total scalarization of the corresponding area. No flags are
3270 being propagated in the process. Return true if anything changed. */
3273 propagate_subaccesses_from_lhs (struct access
*lacc
, struct access
*racc
)
3275 if (is_gimple_reg_type (racc
->type
)
3276 || lacc
->grp_unscalarizable_region
3277 || racc
->grp_unscalarizable_region
)
3280 /* TODO: Do we want set some new racc flag to stop potential total
3281 scalarization if lacc is a scalar access (and none fo the two have
3285 HOST_WIDE_INT norm_delta
= racc
->offset
- lacc
->offset
;
3286 for (struct access
*lchild
= lacc
->first_child
;
3288 lchild
= lchild
->next_sibling
)
3290 struct access
*matching_acc
= NULL
;
3291 HOST_WIDE_INT norm_offset
= lchild
->offset
+ norm_delta
;
3293 if (lchild
->grp_unscalarizable_region
3294 || child_would_conflict_in_acc (racc
, norm_offset
, lchild
->size
,
3296 || !budget_for_propagation_access (racc
->base
))
3299 && propagate_subaccesses_from_lhs (lchild
, matching_acc
))
3300 add_access_to_lhs_work_queue (matching_acc
);
3304 /* Because get_ref_base_and_extent always includes padding in size for
3305 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
3306 type, we might be actually attempting to here to create a child of the
3307 same type as the parent. */
3308 if (!types_compatible_p (racc
->type
, lchild
->type
))
3310 struct access
*new_acc
3311 = create_artificial_child_access (racc
, lchild
, norm_offset
,
3313 new_acc
->grp_result_of_prop_from_lhs
= 1;
3314 propagate_subaccesses_from_lhs (lchild
, new_acc
);
3317 propagate_subaccesses_from_lhs (lchild
, racc
);
3323 /* Propagate all subaccesses across assignment links. */
3326 propagate_all_subaccesses (void)
3328 propagation_budget
= new hash_map
<tree
, unsigned>;
3329 while (rhs_work_queue_head
)
3331 struct access
*racc
= pop_access_from_rhs_work_queue ();
3332 struct assign_link
*link
;
3334 if (racc
->group_representative
)
3335 racc
= racc
->group_representative
;
3336 gcc_assert (racc
->first_rhs_link
);
3338 for (link
= racc
->first_rhs_link
; link
; link
= link
->next_rhs
)
3340 struct access
*lacc
= link
->lacc
;
3342 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (lacc
->base
)))
3344 lacc
= lacc
->group_representative
;
3346 bool reque_parents
= false;
3347 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (racc
->base
)))
3349 if (!lacc
->grp_write
)
3351 subtree_mark_written_and_rhs_enqueue (lacc
);
3352 reque_parents
= true;
3355 else if (propagate_subaccesses_from_rhs (lacc
, racc
))
3356 reque_parents
= true;
3361 add_access_to_rhs_work_queue (lacc
);
3362 lacc
= lacc
->parent
;
3368 while (lhs_work_queue_head
)
3370 struct access
*lacc
= pop_access_from_lhs_work_queue ();
3371 struct assign_link
*link
;
3373 if (lacc
->group_representative
)
3374 lacc
= lacc
->group_representative
;
3375 gcc_assert (lacc
->first_lhs_link
);
3377 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (lacc
->base
)))
3380 for (link
= lacc
->first_lhs_link
; link
; link
= link
->next_lhs
)
3382 struct access
*racc
= link
->racc
;
3384 if (racc
->group_representative
)
3385 racc
= racc
->group_representative
;
3386 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (racc
->base
)))
3388 if (propagate_subaccesses_from_lhs (lacc
, racc
))
3389 add_access_to_lhs_work_queue (racc
);
3392 delete propagation_budget
;
3395 /* Return true if the forest beginning with ROOT does not contain
3396 unscalarizable regions or non-byte aligned accesses. */
3399 can_totally_scalarize_forest_p (struct access
*root
)
3401 struct access
*access
= root
;
3404 if (access
->grp_unscalarizable_region
3405 || (access
->offset
% BITS_PER_UNIT
) != 0
3406 || (access
->size
% BITS_PER_UNIT
) != 0
3407 || (is_gimple_reg_type (access
->type
)
3408 && access
->first_child
))
3411 if (access
->first_child
)
3412 access
= access
->first_child
;
3413 else if (access
->next_sibling
)
3414 access
= access
->next_sibling
;
3417 while (access
->parent
&& !access
->next_sibling
)
3418 access
= access
->parent
;
3419 if (access
->next_sibling
)
3420 access
= access
->next_sibling
;
3423 gcc_assert (access
== root
);
3424 root
= root
->next_grp
;
3433 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3434 reference EXPR for total scalarization purposes and mark it as such. Within
3435 the children of PARENT, link it in between PTR and NEXT_SIBLING. */
3437 static struct access
*
3438 create_total_scalarization_access (struct access
*parent
, HOST_WIDE_INT pos
,
3439 HOST_WIDE_INT size
, tree type
, tree expr
,
3440 struct access
**ptr
,
3441 struct access
*next_sibling
)
3443 struct access
*access
= access_pool
.allocate ();
3444 memset (access
, 0, sizeof (struct access
));
3445 access
->base
= parent
->base
;
3446 access
->offset
= pos
;
3447 access
->size
= size
;
3448 access
->expr
= expr
;
3449 access
->type
= type
;
3450 access
->parent
= parent
;
3451 access
->grp_write
= parent
->grp_write
;
3452 access
->grp_total_scalarization
= 1;
3453 access
->grp_hint
= 1;
3454 access
->grp_same_access_path
= path_comparable_for_same_access (expr
);
3455 access
->reverse
= reverse_storage_order_for_component_p (expr
);
3457 access
->next_sibling
= next_sibling
;
3462 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3463 reference EXPR for total scalarization purposes and mark it as such, link it
3464 at *PTR and reshape the tree so that those elements at *PTR and their
3465 siblings which fall within the part described by POS and SIZE are moved to
3466 be children of the new access. If a partial overlap is detected, return
3469 static struct access
*
3470 create_total_access_and_reshape (struct access
*parent
, HOST_WIDE_INT pos
,
3471 HOST_WIDE_INT size
, tree type
, tree expr
,
3472 struct access
**ptr
)
3474 struct access
**p
= ptr
;
3476 while (*p
&& (*p
)->offset
< pos
+ size
)
3478 if ((*p
)->offset
+ (*p
)->size
> pos
+ size
)
3480 p
= &(*p
)->next_sibling
;
3483 struct access
*next_child
= *ptr
;
3484 struct access
*new_acc
3485 = create_total_scalarization_access (parent
, pos
, size
, type
, expr
,
3489 new_acc
->first_child
= next_child
;
3491 for (struct access
*a
= next_child
; a
; a
= a
->next_sibling
)
3492 a
->parent
= new_acc
;
3497 static bool totally_scalarize_subtree (struct access
*root
);
3499 /* Return true if INNER is either the same type as OUTER or if it is the type
3500 of a record field in OUTER at offset zero, possibly in nested
3504 access_and_field_type_match_p (tree outer
, tree inner
)
3506 if (TYPE_MAIN_VARIANT (outer
) == TYPE_MAIN_VARIANT (inner
))
3508 if (TREE_CODE (outer
) != RECORD_TYPE
)
3510 tree fld
= TYPE_FIELDS (outer
);
3513 if (TREE_CODE (fld
) == FIELD_DECL
)
3515 if (!zerop (DECL_FIELD_OFFSET (fld
)))
3517 if (TYPE_MAIN_VARIANT (TREE_TYPE (fld
)) == inner
)
3519 if (TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
)
3520 fld
= TYPE_FIELDS (TREE_TYPE (fld
));
3525 fld
= DECL_CHAIN (fld
);
3530 /* Return type of total_should_skip_creating_access indicating whether a total
3531 scalarization access for a field/element should be created, whether it
3532 already exists or whether the entire total scalarization has to fail. */
3534 enum total_sra_field_state
{TOTAL_FLD_CREATE
, TOTAL_FLD_DONE
, TOTAL_FLD_FAILED
};
3536 /* Do all the necessary steps in total scalarization when the given aggregate
3537 type has a TYPE at POS with the given SIZE should be put into PARENT and
3538 when we have processed all its siblings with smaller offsets up until and
3539 including LAST_SEEN_SIBLING (which can be NULL).
3541 If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
3542 appropriate. Return TOTAL_FLD_CREATE id the caller should carry on with
3543 creating a new access, TOTAL_FLD_DONE if access or accesses capable of
3544 representing the described part of the aggregate for the purposes of total
3545 scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
3546 prevents total scalarization from happening at all. */
3548 static enum total_sra_field_state
3549 total_should_skip_creating_access (struct access
*parent
,
3550 struct access
**last_seen_sibling
,
3551 tree type
, HOST_WIDE_INT pos
,
3554 struct access
*next_child
;
3555 if (!*last_seen_sibling
)
3556 next_child
= parent
->first_child
;
3558 next_child
= (*last_seen_sibling
)->next_sibling
;
3560 /* First, traverse the chain of siblings until it points to an access with
3561 offset at least equal to POS. Check all skipped accesses whether they
3562 span the POS boundary and if so, return with a failure. */
3563 while (next_child
&& next_child
->offset
< pos
)
3565 if (next_child
->offset
+ next_child
->size
> pos
)
3566 return TOTAL_FLD_FAILED
;
3567 *last_seen_sibling
= next_child
;
3568 next_child
= next_child
->next_sibling
;
3571 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3572 whether it can represent what we need and can be totally scalarized
3574 if (next_child
&& next_child
->offset
== pos
3575 && next_child
->size
== size
)
3577 if (!is_gimple_reg_type (next_child
->type
)
3578 && (!access_and_field_type_match_p (type
, next_child
->type
)
3579 || !totally_scalarize_subtree (next_child
)))
3580 return TOTAL_FLD_FAILED
;
3582 *last_seen_sibling
= next_child
;
3583 return TOTAL_FLD_DONE
;
3586 /* If the child we're looking at would partially overlap, we just cannot
3587 totally scalarize. */
3589 && next_child
->offset
< pos
+ size
3590 && next_child
->offset
+ next_child
->size
> pos
+ size
)
3591 return TOTAL_FLD_FAILED
;
3593 if (is_gimple_reg_type (type
))
3595 /* We don't scalarize accesses that are children of other scalar type
3596 accesses, so if we go on and create an access for a register type,
3597 there should not be any pre-existing children. There are rare cases
3598 where the requested type is a vector but we already have register
3599 accesses for all its elements which is equally good. Detect that
3600 situation or whether we need to bail out. */
3602 HOST_WIDE_INT covered
= pos
;
3603 bool skipping
= false;
3605 && next_child
->offset
+ next_child
->size
<= pos
+ size
)
3607 if (next_child
->offset
!= covered
3608 || !is_gimple_reg_type (next_child
->type
))
3609 return TOTAL_FLD_FAILED
;
3611 covered
+= next_child
->size
;
3612 *last_seen_sibling
= next_child
;
3613 next_child
= next_child
->next_sibling
;
3619 if (covered
!= pos
+ size
)
3620 return TOTAL_FLD_FAILED
;
3622 return TOTAL_FLD_DONE
;
3626 return TOTAL_FLD_CREATE
;
3629 /* Go over sub-tree rooted in ROOT and attempt to create scalar accesses
3630 spanning all uncovered areas covered by ROOT, return false if the attempt
3631 failed. All created accesses will have grp_unscalarizable_region set (and
3632 should be ignored if the function returns false). */
3635 totally_scalarize_subtree (struct access
*root
)
3637 gcc_checking_assert (!root
->grp_unscalarizable_region
);
3638 gcc_checking_assert (!is_gimple_reg_type (root
->type
));
3640 struct access
*last_seen_sibling
= NULL
;
3642 switch (TREE_CODE (root
->type
))
3645 for (tree fld
= TYPE_FIELDS (root
->type
); fld
; fld
= DECL_CHAIN (fld
))
3646 if (TREE_CODE (fld
) == FIELD_DECL
)
3648 tree ft
= TREE_TYPE (fld
);
3649 HOST_WIDE_INT fsize
= tree_to_uhwi (DECL_SIZE (fld
));
3653 HOST_WIDE_INT pos
= root
->offset
+ int_bit_position (fld
);
3654 if (pos
+ fsize
> root
->offset
+ root
->size
)
3656 enum total_sra_field_state
3657 state
= total_should_skip_creating_access (root
,
3662 case TOTAL_FLD_FAILED
:
3664 case TOTAL_FLD_DONE
:
3666 case TOTAL_FLD_CREATE
:
3672 struct access
**p
= (last_seen_sibling
3673 ? &last_seen_sibling
->next_sibling
3674 : &root
->first_child
);
3675 tree nref
= build3 (COMPONENT_REF
, ft
, root
->expr
, fld
, NULL_TREE
);
3676 struct access
*new_child
3677 = create_total_access_and_reshape (root
, pos
, fsize
, ft
, nref
, p
);
3681 if (!is_gimple_reg_type (ft
)
3682 && !totally_scalarize_subtree (new_child
))
3684 last_seen_sibling
= new_child
;
3689 tree elemtype
= TREE_TYPE (root
->type
);
3690 HOST_WIDE_INT el_size
;
3691 offset_int idx
, max
;
3692 if (!prepare_iteration_over_array_elts (root
->type
, &el_size
,
3696 for (HOST_WIDE_INT pos
= root
->offset
;
3698 pos
+= el_size
, ++idx
)
3700 enum total_sra_field_state
3701 state
= total_should_skip_creating_access (root
,
3707 case TOTAL_FLD_FAILED
:
3709 case TOTAL_FLD_DONE
:
3711 case TOTAL_FLD_CREATE
:
3717 struct access
**p
= (last_seen_sibling
3718 ? &last_seen_sibling
->next_sibling
3719 : &root
->first_child
);
3720 tree nref
= build4 (ARRAY_REF
, elemtype
, root
->expr
,
3721 wide_int_to_tree (TYPE_DOMAIN (root
->type
),
3723 NULL_TREE
, NULL_TREE
);
3724 struct access
*new_child
3725 = create_total_access_and_reshape (root
, pos
, el_size
, elemtype
,
3730 if (!is_gimple_reg_type (elemtype
)
3731 && !totally_scalarize_subtree (new_child
))
3733 last_seen_sibling
= new_child
;
3743 /* Get the total total scalarization size limit in the current function. */
3745 unsigned HOST_WIDE_INT
3746 sra_get_max_scalarization_size (void)
3748 bool optimize_speed_p
= !optimize_function_for_size_p (cfun
);
3749 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3750 fall back to a target default. */
3751 unsigned HOST_WIDE_INT max_scalarization_size
3752 = get_move_ratio (optimize_speed_p
) * UNITS_PER_WORD
;
3754 if (optimize_speed_p
)
3756 if (OPTION_SET_P (param_sra_max_scalarization_size_speed
))
3757 max_scalarization_size
= param_sra_max_scalarization_size_speed
;
3761 if (OPTION_SET_P (param_sra_max_scalarization_size_size
))
3762 max_scalarization_size
= param_sra_max_scalarization_size_size
;
3764 max_scalarization_size
*= BITS_PER_UNIT
;
3765 return max_scalarization_size
;
3768 /* Go through all accesses collected throughout the (intraprocedural) analysis
3769 stage, exclude overlapping ones, identify representatives and build trees
3770 out of them, making decisions about scalarization on the way. Return true
3771 iff there are any to-be-scalarized variables after this stage. */
3774 analyze_all_variable_accesses (void)
3777 bitmap tmp
= BITMAP_ALLOC (NULL
);
3781 bitmap_copy (tmp
, candidate_bitmap
);
3782 EXECUTE_IF_SET_IN_BITMAP (tmp
, 0, i
, bi
)
3784 tree var
= candidate (i
);
3785 struct access
*access
;
3787 access
= sort_and_splice_var_accesses (var
);
3788 if (!access
|| !build_access_trees (access
))
3789 disqualify_candidate (var
,
3790 "No or inhibitingly overlapping accesses.");
3793 propagate_all_subaccesses ();
3795 unsigned HOST_WIDE_INT max_scalarization_size
3796 = sra_get_max_scalarization_size ();
3797 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap
, 0, i
, bi
)
3798 if (bitmap_bit_p (should_scalarize_away_bitmap
, i
)
3799 && !bitmap_bit_p (cannot_scalarize_away_bitmap
, i
))
3801 tree var
= candidate (i
);
3805 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var
))) > max_scalarization_size
)
3807 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3809 fprintf (dump_file
, "Too big to totally scalarize: ");
3810 print_generic_expr (dump_file
, var
);
3811 fprintf (dump_file
, " (UID: %u)\n", DECL_UID (var
));
3816 bool all_types_ok
= true;
3817 for (struct access
*access
= get_first_repr_for_decl (var
);
3819 access
= access
->next_grp
)
3820 if (!can_totally_scalarize_forest_p (access
)
3821 || !totally_scalarizable_type_p (access
->type
,
3822 constant_decl_p (var
),
3825 all_types_ok
= false;
3831 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3833 fprintf (dump_file
, "Will attempt to totally scalarize ");
3834 print_generic_expr (dump_file
, var
);
3835 fprintf (dump_file
, " (UID: %u): \n", DECL_UID (var
));
3837 bool scalarized
= true;
3838 for (struct access
*access
= get_first_repr_for_decl (var
);
3840 access
= access
->next_grp
)
3841 if (!is_gimple_reg_type (access
->type
)
3842 && !totally_scalarize_subtree (access
))
3849 for (struct access
*access
= get_first_repr_for_decl (var
);
3851 access
= access
->next_grp
)
3852 access
->grp_total_scalarization
= true;
3856 verify_all_sra_access_forests ();
3858 bitmap_copy (tmp
, candidate_bitmap
);
3859 EXECUTE_IF_SET_IN_BITMAP (tmp
, 0, i
, bi
)
3861 tree var
= candidate (i
);
3862 struct access
*access
= get_first_repr_for_decl (var
);
3864 if (analyze_access_trees (access
))
3867 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3869 fprintf (dump_file
, "\nAccess trees for ");
3870 print_generic_expr (dump_file
, var
);
3871 fprintf (dump_file
, " (UID: %u): \n", DECL_UID (var
));
3872 dump_access_tree (dump_file
, access
);
3873 fprintf (dump_file
, "\n");
3877 disqualify_candidate (var
, "No scalar replacements to be created.");
3884 statistics_counter_event (cfun
, "Scalarized aggregates", res
);
3891 /* Generate statements copying scalar replacements of accesses within a subtree
3892 into or out of AGG. ACCESS, all its children, siblings and their children
3893 are to be processed. AGG is an aggregate type expression (can be a
3894 declaration but does not have to be, it can for example also be a mem_ref or
3895 a series of handled components). TOP_OFFSET is the offset of the processed
3896 subtree which has to be subtracted from offsets of individual accesses to
3897 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
3898 replacements in the interval <start_offset, start_offset + chunk_size>,
3899 otherwise copy all. GSI is a statement iterator used to place the new
3900 statements. WRITE should be true when the statements should write from AGG
3901 to the replacement and false if vice versa. if INSERT_AFTER is true, new
3902 statements will be added after the current statement in GSI, they will be
3903 added before the statement otherwise. */
3906 generate_subtree_copies (struct access
*access
, tree agg
,
3907 HOST_WIDE_INT top_offset
,
3908 HOST_WIDE_INT start_offset
, HOST_WIDE_INT chunk_size
,
3909 gimple_stmt_iterator
*gsi
, bool write
,
3910 bool insert_after
, location_t loc
)
3912 /* Never write anything into constant pool decls. See PR70602. */
3913 if (!write
&& constant_decl_p (agg
))
3917 if (chunk_size
&& access
->offset
>= start_offset
+ chunk_size
)
3920 if (access
->grp_to_be_replaced
3922 || access
->offset
+ access
->size
> start_offset
))
3924 tree expr
, repl
= get_access_replacement (access
);
3927 expr
= build_ref_for_model (loc
, agg
, access
->offset
- top_offset
,
3928 access
, gsi
, insert_after
);
3932 if (access
->grp_partial_lhs
)
3933 expr
= force_gimple_operand_gsi (gsi
, expr
, true, NULL_TREE
,
3935 insert_after
? GSI_NEW_STMT
3937 stmt
= gimple_build_assign (repl
, expr
);
3941 suppress_warning (repl
/* Be more selective! */);
3942 if (access
->grp_partial_lhs
)
3943 repl
= force_gimple_operand_gsi (gsi
, repl
, true, NULL_TREE
,
3945 insert_after
? GSI_NEW_STMT
3947 stmt
= gimple_build_assign (expr
, repl
);
3949 gimple_set_location (stmt
, loc
);
3952 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
3954 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
3956 sra_stats
.subtree_copies
++;
3959 && access
->grp_to_be_debug_replaced
3961 || access
->offset
+ access
->size
> start_offset
))
3964 tree drhs
= build_debug_ref_for_model (loc
, agg
,
3965 access
->offset
- top_offset
,
3967 ds
= gimple_build_debug_bind (get_access_replacement (access
),
3968 drhs
, gsi_stmt (*gsi
));
3970 gsi_insert_after (gsi
, ds
, GSI_NEW_STMT
);
3972 gsi_insert_before (gsi
, ds
, GSI_SAME_STMT
);
3975 if (access
->first_child
)
3976 generate_subtree_copies (access
->first_child
, agg
, top_offset
,
3977 start_offset
, chunk_size
, gsi
,
3978 write
, insert_after
, loc
);
3980 access
= access
->next_sibling
;
3985 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3986 root of the subtree to be processed. GSI is the statement iterator used
3987 for inserting statements which are added after the current statement if
3988 INSERT_AFTER is true or before it otherwise. */
3991 init_subtree_with_zero (struct access
*access
, gimple_stmt_iterator
*gsi
,
3992 bool insert_after
, location_t loc
)
3995 struct access
*child
;
3997 if (access
->grp_to_be_replaced
)
4001 stmt
= gimple_build_assign (get_access_replacement (access
),
4002 build_zero_cst (access
->type
));
4004 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
4006 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
4008 gimple_set_location (stmt
, loc
);
4010 else if (access
->grp_to_be_debug_replaced
)
4013 = gimple_build_debug_bind (get_access_replacement (access
),
4014 build_zero_cst (access
->type
),
4017 gsi_insert_after (gsi
, ds
, GSI_NEW_STMT
);
4019 gsi_insert_before (gsi
, ds
, GSI_SAME_STMT
);
4022 for (child
= access
->first_child
; child
; child
= child
->next_sibling
)
4023 init_subtree_with_zero (child
, gsi
, insert_after
, loc
);
4026 /* Clobber all scalar replacements in an access subtree. ACCESS is the
4027 root of the subtree to be processed. GSI is the statement iterator used
4028 for inserting statements which are added after the current statement if
4029 INSERT_AFTER is true or before it otherwise. */
4032 clobber_subtree (struct access
*access
, gimple_stmt_iterator
*gsi
,
4033 bool insert_after
, location_t loc
)
4036 struct access
*child
;
4038 if (access
->grp_to_be_replaced
)
4040 tree rep
= get_access_replacement (access
);
4041 tree clobber
= build_clobber (access
->type
);
4042 gimple
*stmt
= gimple_build_assign (rep
, clobber
);
4045 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
4047 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
4049 gimple_set_location (stmt
, loc
);
4052 for (child
= access
->first_child
; child
; child
= child
->next_sibling
)
4053 clobber_subtree (child
, gsi
, insert_after
, loc
);
4056 /* Search for an access representative for the given expression EXPR and
4057 return it or NULL if it cannot be found. */
4059 static struct access
*
4060 get_access_for_expr (tree expr
)
4062 poly_int64 poffset
, psize
, pmax_size
;
4063 HOST_WIDE_INT offset
, max_size
;
4067 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
4068 a different size than the size of its argument and we need the latter
4070 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
4071 expr
= TREE_OPERAND (expr
, 0);
4073 base
= get_ref_base_and_extent (expr
, &poffset
, &psize
, &pmax_size
,
4075 if (!known_size_p (pmax_size
)
4076 || !pmax_size
.is_constant (&max_size
)
4077 || !poffset
.is_constant (&offset
)
4081 if (tree basesize
= DECL_SIZE (base
))
4085 || !poly_int_tree_p (basesize
, &sz
)
4086 || known_le (sz
, offset
))
4091 || !bitmap_bit_p (candidate_bitmap
, DECL_UID (base
)))
4094 return get_var_base_offset_size_access (base
, offset
, max_size
);
4097 /* Replace the expression EXPR with a scalar replacement if there is one and
4098 generate other statements to do type conversion or subtree copying if
4099 necessary. WRITE is true if the expression is being written to (it is on a
4100 LHS of a statement or output in an assembly statement). STMT_GSI is used to
4101 place newly created statements before the processed statement, REFRESH_GSI
4102 is used to place them afterwards - unless the processed statement must end a
4103 BB in which case it is placed on the outgoing non-EH edge. REFRESH_GSI and
4104 is then used to continue iteration over the BB. If sra_modify_expr is
4105 called only once with WRITE equal to true on a given statement, both
4106 iterator parameters can point to the same one. */
4109 sra_modify_expr (tree
*expr
, bool write
, gimple_stmt_iterator
*stmt_gsi
,
4110 gimple_stmt_iterator
*refresh_gsi
)
4113 struct access
*access
;
4114 tree type
, bfr
, orig_expr
;
4115 bool partial_cplx_access
= false;
4117 if (TREE_CODE (*expr
) == BIT_FIELD_REF
4118 && (write
|| !sra_handled_bf_read_p (*expr
)))
4121 expr
= &TREE_OPERAND (*expr
, 0);
4126 if (TREE_CODE (*expr
) == REALPART_EXPR
|| TREE_CODE (*expr
) == IMAGPART_EXPR
)
4128 expr
= &TREE_OPERAND (*expr
, 0);
4129 partial_cplx_access
= true;
4131 access
= get_access_for_expr (*expr
);
4134 type
= TREE_TYPE (*expr
);
4137 loc
= gimple_location (gsi_stmt (*stmt_gsi
));
4138 gimple_stmt_iterator alt_gsi
= gsi_none ();
4139 if (write
&& stmt_ends_bb_p (gsi_stmt (*stmt_gsi
)))
4141 alt_gsi
= gsi_start_edge (single_non_eh_succ (gsi_bb (*stmt_gsi
)));
4142 refresh_gsi
= &alt_gsi
;
4145 if (access
->grp_to_be_replaced
)
4147 tree repl
= get_access_replacement (access
);
4148 /* If we replace a non-register typed access simply use the original
4149 access expression to extract the scalar component afterwards.
4150 This happens if scalarizing a function return value or parameter
4151 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
4152 gcc.c-torture/compile/20011217-1.c.
4154 We also want to use this when accessing a complex or vector which can
4155 be accessed as a different type too, potentially creating a need for
4156 type conversion (see PR42196) and when scalarized unions are involved
4157 in assembler statements (see PR42398). */
4158 if (!bfr
&& !useless_type_conversion_p (type
, access
->type
))
4162 ref
= build_ref_for_model (loc
, orig_expr
, 0, access
, stmt_gsi
,
4165 if (partial_cplx_access
)
4167 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
4168 the case of a write because in such case the replacement cannot
4169 be a gimple register. In the case of a load, we have to
4170 differentiate in between a register an non-register
4172 tree t
= build1 (VIEW_CONVERT_EXPR
, type
, repl
);
4173 gcc_checking_assert (!write
|| access
->grp_partial_lhs
);
4174 if (!access
->grp_partial_lhs
)
4176 tree tmp
= make_ssa_name (type
);
4177 gassign
*stmt
= gimple_build_assign (tmp
, t
);
4178 /* This is always a read. */
4179 gsi_insert_before (stmt_gsi
, stmt
, GSI_SAME_STMT
);
4188 if (access
->grp_partial_lhs
)
4189 ref
= force_gimple_operand_gsi (refresh_gsi
, ref
, true,
4190 NULL_TREE
, false, GSI_NEW_STMT
);
4191 stmt
= gimple_build_assign (repl
, ref
);
4192 gimple_set_location (stmt
, loc
);
4193 gsi_insert_after (refresh_gsi
, stmt
, GSI_NEW_STMT
);
4199 if (access
->grp_partial_lhs
)
4200 repl
= force_gimple_operand_gsi (stmt_gsi
, repl
, true,
4203 stmt
= gimple_build_assign (ref
, repl
);
4204 gimple_set_location (stmt
, loc
);
4205 gsi_insert_before (stmt_gsi
, stmt
, GSI_SAME_STMT
);
4210 /* If we are going to replace a scalar field in a structure with
4211 reverse storage order by a stand-alone scalar, we are going to
4212 effectively byte-swap the scalar and we also need to byte-swap
4213 the portion of it represented by the bit-field. */
4214 if (bfr
&& REF_REVERSE_STORAGE_ORDER (bfr
))
4216 REF_REVERSE_STORAGE_ORDER (bfr
) = 0;
4217 TREE_OPERAND (bfr
, 2)
4218 = size_binop (MINUS_EXPR
, TYPE_SIZE (TREE_TYPE (repl
)),
4219 size_binop (PLUS_EXPR
, TREE_OPERAND (bfr
, 1),
4220 TREE_OPERAND (bfr
, 2)));
4228 else if (write
&& access
->grp_to_be_debug_replaced
)
4230 gdebug
*ds
= gimple_build_debug_bind (get_access_replacement (access
),
4232 gsi_stmt (*stmt_gsi
));
4233 gsi_insert_after (stmt_gsi
, ds
, GSI_NEW_STMT
);
4236 if (access
->first_child
&& !TREE_READONLY (access
->base
))
4238 HOST_WIDE_INT start_offset
, chunk_size
;
4240 && tree_fits_uhwi_p (TREE_OPERAND (bfr
, 1))
4241 && tree_fits_uhwi_p (TREE_OPERAND (bfr
, 2)))
4243 chunk_size
= tree_to_uhwi (TREE_OPERAND (bfr
, 1));
4244 start_offset
= access
->offset
4245 + tree_to_uhwi (TREE_OPERAND (bfr
, 2));
4248 start_offset
= chunk_size
= 0;
4250 generate_subtree_copies (access
->first_child
, orig_expr
, access
->offset
,
4251 start_offset
, chunk_size
,
4252 write
? refresh_gsi
: stmt_gsi
,
4258 /* If EXPR, which must be a call argument, is an ADDR_EXPR, generate writes and
4259 reads from its base before and after the call statement given in CALL_GSI
4260 and return true if any copying took place. Otherwise call sra_modify_expr
4261 on EXPR and return its value. FLAGS is what the gimple_call_arg_flags
4262 return for the given parameter. */
4265 sra_modify_call_arg (tree
*expr
, gimple_stmt_iterator
*call_gsi
,
4266 gimple_stmt_iterator
*refresh_gsi
, int flags
)
4268 if (TREE_CODE (*expr
) != ADDR_EXPR
)
4269 return sra_modify_expr (expr
, false, call_gsi
, refresh_gsi
);
4271 if (flags
& EAF_UNUSED
)
4274 tree base
= get_base_address (TREE_OPERAND (*expr
, 0));
4277 struct access
*access
= get_access_for_expr (base
);
4281 gimple
*stmt
= gsi_stmt (*call_gsi
);
4282 location_t loc
= gimple_location (stmt
);
4283 generate_subtree_copies (access
, base
, 0, 0, 0, call_gsi
, false, false,
4286 if (flags
& EAF_NO_DIRECT_CLOBBER
)
4289 if (!stmt_ends_bb_p (stmt
))
4290 generate_subtree_copies (access
, base
, 0, 0, 0, refresh_gsi
, true,
4296 FOR_EACH_EDGE (e
, ei
, gsi_bb (*call_gsi
)->succs
)
4298 gimple_stmt_iterator alt_gsi
= gsi_start_edge (e
);
4299 generate_subtree_copies (access
, base
, 0, 0, 0, &alt_gsi
, true,
4306 /* Where scalar replacements of the RHS have been written to when a replacement
4307 of a LHS of an assigments cannot be direclty loaded from a replacement of
4309 enum unscalarized_data_handling
{ SRA_UDH_NONE
, /* Nothing done so far. */
4310 SRA_UDH_RIGHT
, /* Data flushed to the RHS. */
4311 SRA_UDH_LEFT
}; /* Data flushed to the LHS. */
4313 struct subreplacement_assignment_data
4315 /* Offset of the access representing the lhs of the assignment. */
4316 HOST_WIDE_INT left_offset
;
4318 /* LHS and RHS of the original assignment. */
4319 tree assignment_lhs
, assignment_rhs
;
4321 /* Access representing the rhs of the whole assignment. */
4322 struct access
*top_racc
;
4324 /* Stmt iterator used for statement insertions after the original assignment.
4325 It points to the main GSI used to traverse a BB during function body
4327 gimple_stmt_iterator
*new_gsi
;
4329 /* Stmt iterator used for statement insertions before the original
4330 assignment. Keeps on pointing to the original statement. */
4331 gimple_stmt_iterator old_gsi
;
4333 /* Location of the assignment. */
4336 /* Keeps the information whether we have needed to refresh replacements of
4337 the LHS and from which side of the assignments this takes place. */
4338 enum unscalarized_data_handling refreshed
;
4341 /* Store all replacements in the access tree rooted in TOP_RACC either to their
4342 base aggregate if there are unscalarized data or directly to LHS of the
4343 statement that is pointed to by GSI otherwise. */
4346 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data
*sad
)
4349 /* If the RHS is a load from a constant, we do not need to (and must not)
4350 flush replacements to it and can use it directly as if we did. */
4351 if (TREE_READONLY (sad
->top_racc
->base
))
4353 sad
->refreshed
= SRA_UDH_RIGHT
;
4356 if (sad
->top_racc
->grp_unscalarized_data
)
4358 src
= sad
->assignment_rhs
;
4359 sad
->refreshed
= SRA_UDH_RIGHT
;
4363 src
= sad
->assignment_lhs
;
4364 sad
->refreshed
= SRA_UDH_LEFT
;
4366 generate_subtree_copies (sad
->top_racc
->first_child
, src
,
4367 sad
->top_racc
->offset
, 0, 0,
4368 &sad
->old_gsi
, false, false, sad
->loc
);
4371 /* Try to generate statements to load all sub-replacements in an access subtree
4372 formed by children of LACC from scalar replacements in the SAD->top_racc
4373 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
4374 and load the accesses from it. */
4377 load_assign_lhs_subreplacements (struct access
*lacc
,
4378 struct subreplacement_assignment_data
*sad
)
4380 for (lacc
= lacc
->first_child
; lacc
; lacc
= lacc
->next_sibling
)
4382 HOST_WIDE_INT offset
;
4383 offset
= lacc
->offset
- sad
->left_offset
+ sad
->top_racc
->offset
;
4385 if (lacc
->grp_to_be_replaced
)
4387 struct access
*racc
;
4391 racc
= find_access_in_subtree (sad
->top_racc
, offset
, lacc
->size
);
4392 if (racc
&& racc
->grp_to_be_replaced
)
4394 rhs
= get_access_replacement (racc
);
4396 if (!useless_type_conversion_p (lacc
->type
, racc
->type
))
4398 rhs
= fold_build1_loc (sad
->loc
, VIEW_CONVERT_EXPR
,
4403 if (lacc
->grp_partial_lhs
&& (vce
|| racc
->grp_partial_lhs
))
4404 rhs
= force_gimple_operand_gsi (&sad
->old_gsi
, rhs
, true,
4405 NULL_TREE
, true, GSI_SAME_STMT
);
4409 /* No suitable access on the right hand side, need to load from
4410 the aggregate. See if we have to update it first... */
4411 if (sad
->refreshed
== SRA_UDH_NONE
)
4412 handle_unscalarized_data_in_subtree (sad
);
4414 if (sad
->refreshed
== SRA_UDH_LEFT
)
4415 rhs
= build_ref_for_model (sad
->loc
, sad
->assignment_lhs
,
4416 lacc
->offset
- sad
->left_offset
,
4417 lacc
, sad
->new_gsi
, true);
4419 rhs
= build_ref_for_model (sad
->loc
, sad
->assignment_rhs
,
4420 lacc
->offset
- sad
->left_offset
,
4421 lacc
, sad
->new_gsi
, true);
4422 if (lacc
->grp_partial_lhs
)
4423 rhs
= force_gimple_operand_gsi (sad
->new_gsi
,
4424 rhs
, true, NULL_TREE
,
4425 false, GSI_NEW_STMT
);
4428 stmt
= gimple_build_assign (get_access_replacement (lacc
), rhs
);
4429 gsi_insert_after (sad
->new_gsi
, stmt
, GSI_NEW_STMT
);
4430 gimple_set_location (stmt
, sad
->loc
);
4432 sra_stats
.subreplacements
++;
4436 if (sad
->refreshed
== SRA_UDH_NONE
4437 && lacc
->grp_read
&& !lacc
->grp_covered
)
4438 handle_unscalarized_data_in_subtree (sad
);
4440 if (lacc
&& lacc
->grp_to_be_debug_replaced
)
4444 struct access
*racc
= find_access_in_subtree (sad
->top_racc
,
4448 if (racc
&& racc
->grp_to_be_replaced
)
4450 if (racc
->grp_write
|| constant_decl_p (racc
->base
))
4451 drhs
= get_access_replacement (racc
);
4455 else if (sad
->refreshed
== SRA_UDH_LEFT
)
4456 drhs
= build_debug_ref_for_model (sad
->loc
, lacc
->base
,
4457 lacc
->offset
, lacc
);
4458 else if (sad
->refreshed
== SRA_UDH_RIGHT
)
4459 drhs
= build_debug_ref_for_model (sad
->loc
, sad
->top_racc
->base
,
4464 && !useless_type_conversion_p (lacc
->type
, TREE_TYPE (drhs
)))
4465 drhs
= fold_build1_loc (sad
->loc
, VIEW_CONVERT_EXPR
,
4467 ds
= gimple_build_debug_bind (get_access_replacement (lacc
),
4468 drhs
, gsi_stmt (sad
->old_gsi
));
4469 gsi_insert_after (sad
->new_gsi
, ds
, GSI_NEW_STMT
);
4473 if (lacc
->first_child
)
4474 load_assign_lhs_subreplacements (lacc
, sad
);
4478 /* Result code for SRA assignment modification. */
4479 enum assignment_mod_result
{ SRA_AM_NONE
, /* nothing done for the stmt */
4480 SRA_AM_MODIFIED
, /* stmt changed but not
4482 SRA_AM_REMOVED
}; /* stmt eliminated */
4484 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
4485 to the assignment and GSI is the statement iterator pointing at it. Returns
4486 the same values as sra_modify_assign. */
4488 static enum assignment_mod_result
4489 sra_modify_constructor_assign (gimple
*stmt
, gimple_stmt_iterator
*gsi
)
4491 tree lhs
= gimple_assign_lhs (stmt
);
4492 struct access
*acc
= get_access_for_expr (lhs
);
4495 location_t loc
= gimple_location (stmt
);
4497 if (gimple_clobber_p (stmt
))
4499 /* Clobber the replacement variable. */
4500 clobber_subtree (acc
, gsi
, !acc
->grp_covered
, loc
);
4501 /* Remove clobbers of fully scalarized variables, they are dead. */
4502 if (acc
->grp_covered
)
4504 unlink_stmt_vdef (stmt
);
4505 gsi_remove (gsi
, true);
4506 release_defs (stmt
);
4507 return SRA_AM_REMOVED
;
4510 return SRA_AM_MODIFIED
;
4513 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt
)) > 0)
4515 /* I have never seen this code path trigger but if it can happen the
4516 following should handle it gracefully. */
4517 if (access_has_children_p (acc
))
4518 generate_subtree_copies (acc
->first_child
, lhs
, acc
->offset
, 0, 0, gsi
,
4520 return SRA_AM_MODIFIED
;
4523 if (acc
->grp_covered
)
4525 init_subtree_with_zero (acc
, gsi
, false, loc
);
4526 unlink_stmt_vdef (stmt
);
4527 gsi_remove (gsi
, true);
4528 release_defs (stmt
);
4529 return SRA_AM_REMOVED
;
4533 init_subtree_with_zero (acc
, gsi
, true, loc
);
4534 return SRA_AM_MODIFIED
;
4538 /* Create and return a new suitable default definition SSA_NAME for RACC which
4539 is an access describing an uninitialized part of an aggregate that is being
4540 loaded. REG_TREE is used instead of the actual RACC type if that is not of
4541 a gimple register type. */
4544 get_repl_default_def_ssa_name (struct access
*racc
, tree reg_type
)
4546 gcc_checking_assert (!racc
->grp_to_be_replaced
4547 && !racc
->grp_to_be_debug_replaced
);
4548 if (!racc
->replacement_decl
)
4549 racc
->replacement_decl
= create_access_replacement (racc
, reg_type
);
4550 return get_or_create_ssa_default_def (cfun
, racc
->replacement_decl
);
4554 /* Generate statements to call .DEFERRED_INIT to initialize scalar replacements
4555 of accesses within a subtree ACCESS; all its children, siblings and their
4556 children are to be processed.
4557 GSI is a statement iterator used to place the new statements. */
4559 generate_subtree_deferred_init (struct access
*access
,
4562 gimple_stmt_iterator
*gsi
,
4567 if (access
->grp_to_be_replaced
)
4569 tree repl
= get_access_replacement (access
);
4571 = gimple_build_call_internal (IFN_DEFERRED_INIT
, 3,
4572 TYPE_SIZE_UNIT (TREE_TYPE (repl
)),
4573 init_type
, decl_name
);
4574 gimple_call_set_lhs (call
, repl
);
4575 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
4577 gimple_set_location (call
, loc
);
4578 sra_stats
.subtree_deferred_init
++;
4580 if (access
->first_child
)
4581 generate_subtree_deferred_init (access
->first_child
, init_type
,
4582 decl_name
, gsi
, loc
);
4584 access
= access
->next_sibling
;
4589 /* For a call to .DEFERRED_INIT:
4590 var = .DEFERRED_INIT (size_of_var, init_type, name_of_var);
4591 examine the LHS variable VAR and replace it with a scalar replacement if
4592 there is one, also replace the RHS call to a call to .DEFERRED_INIT of
4593 the corresponding scalar relacement variable. Examine the subtree and
4594 do the scalar replacements in the subtree too. STMT is the call, GSI is
4595 the statment iterator to place newly created statement. */
4597 static enum assignment_mod_result
4598 sra_modify_deferred_init (gimple
*stmt
, gimple_stmt_iterator
*gsi
)
4600 tree lhs
= gimple_call_lhs (stmt
);
4601 tree init_type
= gimple_call_arg (stmt
, 1);
4602 tree decl_name
= gimple_call_arg (stmt
, 2);
4604 struct access
*lhs_access
= get_access_for_expr (lhs
);
4608 location_t loc
= gimple_location (stmt
);
4610 if (lhs_access
->grp_to_be_replaced
)
4612 tree lhs_repl
= get_access_replacement (lhs_access
);
4613 gimple_call_set_lhs (stmt
, lhs_repl
);
4614 tree arg0_repl
= TYPE_SIZE_UNIT (TREE_TYPE (lhs_repl
));
4615 gimple_call_set_arg (stmt
, 0, arg0_repl
);
4616 sra_stats
.deferred_init
++;
4617 gcc_assert (!lhs_access
->first_child
);
4618 return SRA_AM_MODIFIED
;
4621 if (lhs_access
->first_child
)
4622 generate_subtree_deferred_init (lhs_access
->first_child
,
4623 init_type
, decl_name
, gsi
, loc
);
4624 if (lhs_access
->grp_covered
)
4626 unlink_stmt_vdef (stmt
);
4627 gsi_remove (gsi
, true);
4628 release_defs (stmt
);
4629 return SRA_AM_REMOVED
;
4632 return SRA_AM_MODIFIED
;
4635 /* Examine both sides of the assignment statement pointed to by STMT, replace
4636 them with a scalare replacement if there is one and generate copying of
4637 replacements if scalarized aggregates have been used in the assignment. GSI
4638 is used to hold generated statements for type conversions and subtree
4641 static enum assignment_mod_result
4642 sra_modify_assign (gimple
*stmt
, gimple_stmt_iterator
*gsi
)
4644 struct access
*lacc
, *racc
;
4646 bool modify_this_stmt
= false;
4647 bool force_gimple_rhs
= false;
4649 gimple_stmt_iterator orig_gsi
= *gsi
;
4651 if (!gimple_assign_single_p (stmt
))
4653 lhs
= gimple_assign_lhs (stmt
);
4654 rhs
= gimple_assign_rhs1 (stmt
);
4656 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
4657 return sra_modify_constructor_assign (stmt
, gsi
);
4659 if (TREE_CODE (rhs
) == REALPART_EXPR
|| TREE_CODE (lhs
) == REALPART_EXPR
4660 || TREE_CODE (rhs
) == IMAGPART_EXPR
|| TREE_CODE (lhs
) == IMAGPART_EXPR
4661 || (TREE_CODE (rhs
) == BIT_FIELD_REF
&& !sra_handled_bf_read_p (rhs
))
4662 || TREE_CODE (lhs
) == BIT_FIELD_REF
)
4664 modify_this_stmt
= sra_modify_expr (gimple_assign_rhs1_ptr (stmt
),
4666 modify_this_stmt
|= sra_modify_expr (gimple_assign_lhs_ptr (stmt
),
4668 return modify_this_stmt
? SRA_AM_MODIFIED
: SRA_AM_NONE
;
4671 lacc
= get_access_for_expr (lhs
);
4672 racc
= get_access_for_expr (rhs
);
4675 /* Avoid modifying initializations of constant-pool replacements. */
4676 if (racc
&& (racc
->replacement_decl
== lhs
))
4679 loc
= gimple_location (stmt
);
4680 if (lacc
&& lacc
->grp_to_be_replaced
)
4682 lhs
= get_access_replacement (lacc
);
4683 gimple_assign_set_lhs (stmt
, lhs
);
4684 modify_this_stmt
= true;
4685 if (lacc
->grp_partial_lhs
)
4686 force_gimple_rhs
= true;
4690 if (racc
&& racc
->grp_to_be_replaced
)
4692 rhs
= get_access_replacement (racc
);
4693 modify_this_stmt
= true;
4694 if (racc
->grp_partial_lhs
)
4695 force_gimple_rhs
= true;
4699 && !racc
->grp_unscalarized_data
4700 && !racc
->grp_unscalarizable_region
4701 && TREE_CODE (lhs
) == SSA_NAME
4702 && !access_has_replacements_p (racc
))
4704 rhs
= get_repl_default_def_ssa_name (racc
, TREE_TYPE (lhs
));
4705 modify_this_stmt
= true;
4709 if (modify_this_stmt
4710 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (rhs
)))
4712 /* If we can avoid creating a VIEW_CONVERT_EXPR, then do so.
4713 ??? This should move to fold_stmt which we simply should
4714 call after building a VIEW_CONVERT_EXPR here. */
4715 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs
))
4716 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (lhs
)) == racc
->reverse
4717 && !contains_bitfld_component_ref_p (lhs
))
4719 lhs
= build_ref_for_model (loc
, lhs
, 0, racc
, gsi
, false);
4720 gimple_assign_set_lhs (stmt
, lhs
);
4723 && AGGREGATE_TYPE_P (TREE_TYPE (rhs
))
4724 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (rhs
)) == lacc
->reverse
4725 && !contains_vce_or_bfcref_p (rhs
))
4726 rhs
= build_ref_for_model (loc
, rhs
, 0, lacc
, gsi
, false);
4728 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (rhs
)))
4730 rhs
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
), rhs
);
4731 if (is_gimple_reg_type (TREE_TYPE (lhs
))
4732 && TREE_CODE (lhs
) != SSA_NAME
)
4733 force_gimple_rhs
= true;
4737 if (lacc
&& lacc
->grp_to_be_debug_replaced
)
4739 tree dlhs
= get_access_replacement (lacc
);
4740 tree drhs
= unshare_expr (rhs
);
4741 if (!useless_type_conversion_p (TREE_TYPE (dlhs
), TREE_TYPE (drhs
)))
4743 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs
))
4744 && !contains_vce_or_bfcref_p (drhs
))
4745 drhs
= build_debug_ref_for_model (loc
, drhs
, 0, lacc
);
4747 && !useless_type_conversion_p (TREE_TYPE (dlhs
),
4749 drhs
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
4750 TREE_TYPE (dlhs
), drhs
);
4752 gdebug
*ds
= gimple_build_debug_bind (dlhs
, drhs
, stmt
);
4753 gsi_insert_before (gsi
, ds
, GSI_SAME_STMT
);
4756 /* From this point on, the function deals with assignments in between
4757 aggregates when at least one has scalar reductions of some of its
4758 components. There are three possible scenarios: Both the LHS and RHS have
4759 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
4761 In the first case, we would like to load the LHS components from RHS
4762 components whenever possible. If that is not possible, we would like to
4763 read it directly from the RHS (after updating it by storing in it its own
4764 components). If there are some necessary unscalarized data in the LHS,
4765 those will be loaded by the original assignment too. If neither of these
4766 cases happen, the original statement can be removed. Most of this is done
4767 by load_assign_lhs_subreplacements.
4769 In the second case, we would like to store all RHS scalarized components
4770 directly into LHS and if they cover the aggregate completely, remove the
4771 statement too. In the third case, we want the LHS components to be loaded
4772 directly from the RHS (DSE will remove the original statement if it
4775 This is a bit complex but manageable when types match and when unions do
4776 not cause confusion in a way that we cannot really load a component of LHS
4777 from the RHS or vice versa (the access representing this level can have
4778 subaccesses that are accessible only through a different union field at a
4779 higher level - different from the one used in the examined expression).
4782 Therefore, I specially handle a fourth case, happening when there is a
4783 specific type cast or it is impossible to locate a scalarized subaccess on
4784 the other side of the expression. If that happens, I simply "refresh" the
4785 RHS by storing in it is scalarized components leave the original statement
4786 there to do the copying and then load the scalar replacements of the LHS.
4787 This is what the first branch does. */
4789 if (modify_this_stmt
4790 || gimple_has_volatile_ops (stmt
)
4791 || contains_vce_or_bfcref_p (rhs
)
4792 || contains_vce_or_bfcref_p (lhs
)
4793 || stmt_ends_bb_p (stmt
))
4795 /* No need to copy into a constant, it comes pre-initialized. */
4796 if (access_has_children_p (racc
) && !TREE_READONLY (racc
->base
))
4797 generate_subtree_copies (racc
->first_child
, rhs
, racc
->offset
, 0, 0,
4798 gsi
, false, false, loc
);
4799 if (access_has_children_p (lacc
))
4801 gimple_stmt_iterator alt_gsi
= gsi_none ();
4802 if (stmt_ends_bb_p (stmt
))
4804 alt_gsi
= gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi
)));
4807 generate_subtree_copies (lacc
->first_child
, lhs
, lacc
->offset
, 0, 0,
4808 gsi
, true, true, loc
);
4810 sra_stats
.separate_lhs_rhs_handling
++;
4812 /* This gimplification must be done after generate_subtree_copies,
4813 lest we insert the subtree copies in the middle of the gimplified
4815 if (force_gimple_rhs
)
4816 rhs
= force_gimple_operand_gsi (&orig_gsi
, rhs
, true, NULL_TREE
,
4817 true, GSI_SAME_STMT
);
4818 if (gimple_assign_rhs1 (stmt
) != rhs
)
4820 modify_this_stmt
= true;
4821 gimple_assign_set_rhs_from_tree (&orig_gsi
, rhs
);
4822 gcc_assert (stmt
== gsi_stmt (orig_gsi
));
4825 return modify_this_stmt
? SRA_AM_MODIFIED
: SRA_AM_NONE
;
4829 if (access_has_children_p (lacc
)
4830 && access_has_children_p (racc
)
4831 /* When an access represents an unscalarizable region, it usually
4832 represents accesses with variable offset and thus must not be used
4833 to generate new memory accesses. */
4834 && !lacc
->grp_unscalarizable_region
4835 && !racc
->grp_unscalarizable_region
)
4837 struct subreplacement_assignment_data sad
;
4839 sad
.left_offset
= lacc
->offset
;
4840 sad
.assignment_lhs
= lhs
;
4841 sad
.assignment_rhs
= rhs
;
4842 sad
.top_racc
= racc
;
4845 sad
.loc
= gimple_location (stmt
);
4846 sad
.refreshed
= SRA_UDH_NONE
;
4848 if (lacc
->grp_read
&& !lacc
->grp_covered
)
4849 handle_unscalarized_data_in_subtree (&sad
);
4851 load_assign_lhs_subreplacements (lacc
, &sad
);
4852 if (sad
.refreshed
!= SRA_UDH_RIGHT
)
4855 unlink_stmt_vdef (stmt
);
4856 gsi_remove (&sad
.old_gsi
, true);
4857 release_defs (stmt
);
4858 sra_stats
.deleted
++;
4859 return SRA_AM_REMOVED
;
4864 if (access_has_children_p (racc
)
4865 && !racc
->grp_unscalarized_data
4866 && TREE_CODE (lhs
) != SSA_NAME
)
4870 fprintf (dump_file
, "Removing load: ");
4871 print_gimple_stmt (dump_file
, stmt
, 0);
4873 generate_subtree_copies (racc
->first_child
, lhs
,
4874 racc
->offset
, 0, 0, gsi
,
4876 gcc_assert (stmt
== gsi_stmt (*gsi
));
4877 unlink_stmt_vdef (stmt
);
4878 gsi_remove (gsi
, true);
4879 release_defs (stmt
);
4880 sra_stats
.deleted
++;
4881 return SRA_AM_REMOVED
;
4883 /* Restore the aggregate RHS from its components so the
4884 prevailing aggregate copy does the right thing. */
4885 if (access_has_children_p (racc
) && !TREE_READONLY (racc
->base
))
4886 generate_subtree_copies (racc
->first_child
, rhs
, racc
->offset
, 0, 0,
4887 gsi
, false, false, loc
);
4888 /* Re-load the components of the aggregate copy destination.
4889 But use the RHS aggregate to load from to expose more
4890 optimization opportunities. */
4891 if (access_has_children_p (lacc
))
4893 generate_subtree_copies (lacc
->first_child
, rhs
, lacc
->offset
,
4894 0, 0, gsi
, true, true, loc
);
4895 if (lacc
->grp_covered
)
4897 unlink_stmt_vdef (stmt
);
4898 gsi_remove (& orig_gsi
, true);
4899 release_defs (stmt
);
4900 sra_stats
.deleted
++;
4901 return SRA_AM_REMOVED
;
4910 /* Set any scalar replacements of values in the constant pool to the initial
4911 value of the constant. (Constant-pool decls like *.LC0 have effectively
4912 been initialized before the program starts, we must do the same for their
4913 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
4914 the function's entry block. */
4917 initialize_constant_pool_replacements (void)
4919 gimple_seq seq
= NULL
;
4920 gimple_stmt_iterator gsi
= gsi_start (seq
);
4924 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap
, 0, i
, bi
)
4926 tree var
= candidate (i
);
4927 if (!constant_decl_p (var
))
4930 struct access
*access
= get_first_repr_for_decl (var
);
4934 if (access
->replacement_decl
)
4937 = gimple_build_assign (get_access_replacement (access
),
4938 unshare_expr (access
->expr
));
4939 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4941 fprintf (dump_file
, "Generating constant initializer: ");
4942 print_gimple_stmt (dump_file
, stmt
, 0);
4943 fprintf (dump_file
, "\n");
4945 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
4949 if (access
->first_child
)
4950 access
= access
->first_child
;
4951 else if (access
->next_sibling
)
4952 access
= access
->next_sibling
;
4955 while (access
->parent
&& !access
->next_sibling
)
4956 access
= access
->parent
;
4957 if (access
->next_sibling
)
4958 access
= access
->next_sibling
;
4960 access
= access
->next_grp
;
4965 seq
= gsi_seq (gsi
);
4967 gsi_insert_seq_on_edge_immediate (
4968 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
)), seq
);
4971 /* Traverse the function body and all modifications as decided in
4972 analyze_all_variable_accesses. Return true iff the CFG has been
4976 sra_modify_function_body (void)
4978 bool cfg_changed
= false;
4981 initialize_constant_pool_replacements ();
4983 FOR_EACH_BB_FN (bb
, cfun
)
4985 gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
4986 while (!gsi_end_p (gsi
))
4988 gimple
*stmt
= gsi_stmt (gsi
);
4989 enum assignment_mod_result assign_result
;
4990 bool modified
= false, deleted
= false;
4994 switch (gimple_code (stmt
))
4997 t
= gimple_return_retval_ptr (as_a
<greturn
*> (stmt
));
4998 if (*t
!= NULL_TREE
)
4999 modified
|= sra_modify_expr (t
, false, &gsi
, &gsi
);
5003 assign_result
= sra_modify_assign (stmt
, &gsi
);
5004 modified
|= assign_result
== SRA_AM_MODIFIED
;
5005 deleted
= assign_result
== SRA_AM_REMOVED
;
5009 /* Handle calls to .DEFERRED_INIT specially. */
5010 if (gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
5012 assign_result
= sra_modify_deferred_init (stmt
, &gsi
);
5013 modified
|= assign_result
== SRA_AM_MODIFIED
;
5014 deleted
= assign_result
== SRA_AM_REMOVED
;
5018 gcall
*call
= as_a
<gcall
*> (stmt
);
5019 gimple_stmt_iterator call_gsi
= gsi
;
5021 /* Operands must be processed before the lhs. */
5022 for (i
= 0; i
< gimple_call_num_args (call
); i
++)
5024 int flags
= gimple_call_arg_flags (call
, i
);
5025 t
= gimple_call_arg_ptr (call
, i
);
5026 modified
|= sra_modify_call_arg (t
, &call_gsi
, &gsi
, flags
);
5028 if (gimple_call_chain (call
))
5030 t
= gimple_call_chain_ptr (call
);
5031 int flags
= gimple_call_static_chain_flags (call
);
5032 modified
|= sra_modify_call_arg (t
, &call_gsi
, &gsi
,
5035 if (gimple_call_lhs (call
))
5037 t
= gimple_call_lhs_ptr (call
);
5038 modified
|= sra_modify_expr (t
, true, &call_gsi
, &gsi
);
5045 gimple_stmt_iterator stmt_gsi
= gsi
;
5046 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5047 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); i
++)
5049 t
= &TREE_VALUE (gimple_asm_input_op (asm_stmt
, i
));
5050 modified
|= sra_modify_expr (t
, false, &stmt_gsi
, &gsi
);
5052 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); i
++)
5054 t
= &TREE_VALUE (gimple_asm_output_op (asm_stmt
, i
));
5055 modified
|= sra_modify_expr (t
, true, &stmt_gsi
, &gsi
);
5067 if (maybe_clean_eh_stmt (stmt
)
5068 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
5076 gsi_commit_edge_inserts ();
5080 /* Generate statements initializing scalar replacements of parts of function
5084 initialize_parameter_reductions (void)
5086 gimple_stmt_iterator gsi
;
5087 gimple_seq seq
= NULL
;
5090 gsi
= gsi_start (seq
);
5091 for (parm
= DECL_ARGUMENTS (current_function_decl
);
5093 parm
= DECL_CHAIN (parm
))
5095 vec
<access_p
> *access_vec
;
5096 struct access
*access
;
5098 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (parm
)))
5100 access_vec
= get_base_access_vector (parm
);
5104 for (access
= (*access_vec
)[0];
5106 access
= access
->next_grp
)
5107 generate_subtree_copies (access
, parm
, 0, 0, 0, &gsi
, true, true,
5108 EXPR_LOCATION (parm
));
5111 seq
= gsi_seq (gsi
);
5113 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
)), seq
);
5116 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
5117 it reveals there are components of some aggregates to be scalarized, it runs
5118 the required transformations. */
5120 perform_intra_sra (void)
5125 if (!find_var_candidates ())
5128 if (!scan_function ())
5131 if (!analyze_all_variable_accesses ())
5134 if (sra_modify_function_body ())
5135 ret
= TODO_update_ssa
| TODO_cleanup_cfg
;
5137 ret
= TODO_update_ssa
;
5138 initialize_parameter_reductions ();
5140 statistics_counter_event (cfun
, "Scalar replacements created",
5141 sra_stats
.replacements
);
5142 statistics_counter_event (cfun
, "Modified expressions", sra_stats
.exprs
);
5143 statistics_counter_event (cfun
, "Subtree copy stmts",
5144 sra_stats
.subtree_copies
);
5145 statistics_counter_event (cfun
, "Subreplacement stmts",
5146 sra_stats
.subreplacements
);
5147 statistics_counter_event (cfun
, "Deleted stmts", sra_stats
.deleted
);
5148 statistics_counter_event (cfun
, "Separate LHS and RHS handling",
5149 sra_stats
.separate_lhs_rhs_handling
);
5152 sra_deinitialize ();
5156 /* Perform early intraprocedural SRA. */
5158 early_intra_sra (void)
5160 sra_mode
= SRA_MODE_EARLY_INTRA
;
5161 return perform_intra_sra ();
5164 /* Perform "late" intraprocedural SRA. */
5166 late_intra_sra (void)
5168 sra_mode
= SRA_MODE_INTRA
;
5169 return perform_intra_sra ();
5174 gate_intra_sra (void)
5176 return flag_tree_sra
!= 0 && dbg_cnt (tree_sra
);
5182 const pass_data pass_data_sra_early
=
5184 GIMPLE_PASS
, /* type */
5186 OPTGROUP_NONE
, /* optinfo_flags */
5187 TV_TREE_SRA
, /* tv_id */
5188 ( PROP_cfg
| PROP_ssa
), /* properties_required */
5189 0, /* properties_provided */
5190 0, /* properties_destroyed */
5191 0, /* todo_flags_start */
5192 TODO_update_ssa
, /* todo_flags_finish */
5195 class pass_sra_early
: public gimple_opt_pass
5198 pass_sra_early (gcc::context
*ctxt
)
5199 : gimple_opt_pass (pass_data_sra_early
, ctxt
)
5202 /* opt_pass methods: */
5203 bool gate (function
*) final override
{ return gate_intra_sra (); }
5204 unsigned int execute (function
*) final override
5206 return early_intra_sra ();
5209 }; // class pass_sra_early
5214 make_pass_sra_early (gcc::context
*ctxt
)
5216 return new pass_sra_early (ctxt
);
5221 const pass_data pass_data_sra
=
5223 GIMPLE_PASS
, /* type */
5225 OPTGROUP_NONE
, /* optinfo_flags */
5226 TV_TREE_SRA
, /* tv_id */
5227 ( PROP_cfg
| PROP_ssa
), /* properties_required */
5228 0, /* properties_provided */
5229 0, /* properties_destroyed */
5230 TODO_update_address_taken
, /* todo_flags_start */
5231 TODO_update_ssa
, /* todo_flags_finish */
5234 class pass_sra
: public gimple_opt_pass
5237 pass_sra (gcc::context
*ctxt
)
5238 : gimple_opt_pass (pass_data_sra
, ctxt
)
5241 /* opt_pass methods: */
5242 bool gate (function
*) final override
{ return gate_intra_sra (); }
5243 unsigned int execute (function
*) final override
{ return late_intra_sra (); }
5245 }; // class pass_sra
5250 make_pass_sra (gcc::context
*ctxt
)
5252 return new pass_sra (ctxt
);
5256 /* If type T cannot be totally scalarized, return false. Otherwise return true
5257 and push to the vector within PC offsets and lengths of all padding in the
5258 type as total scalarization would encounter it. */
5261 check_ts_and_push_padding_to_vec (tree type
, sra_padding_collecting
*pc
)
5263 if (!totally_scalarizable_type_p (type
, true /* optimistic value */,
5267 pc
->record_padding (tree_to_shwi (TYPE_SIZE (type
)));
5271 /* Given two types in an assignment, return true either if any one cannot be
5272 totally scalarized or if they have padding (i.e. not copied bits) */
5275 sra_total_scalarization_would_copy_same_data_p (tree t1
, tree t2
)
5277 sra_padding_collecting p1
;
5278 if (!check_ts_and_push_padding_to_vec (t1
, &p1
))
5281 sra_padding_collecting p2
;
5282 if (!check_ts_and_push_padding_to_vec (t2
, &p2
))
5285 unsigned l
= p1
.m_padding
.length ();
5286 if (l
!= p2
.m_padding
.length ())
5288 for (unsigned i
= 0; i
< l
; i
++)
5289 if (p1
.m_padding
[i
].first
!= p2
.m_padding
[i
].first
5290 || p1
.m_padding
[i
].second
!= p2
.m_padding
[i
].second
)