/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */

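/* Illustrative sketch (not part of the original sources): once the cache has
   been built, a pass normally walks it with the iterator macros from
   tree-ssa-operands.h rather than touching the build_* vectors directly.
   Assuming the SSA_OP_USE filter and a purely hypothetical process_use()
   helper:

       tree use;
       ssa_op_iter iter;

       FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
         process_use (use);

   Real operands visited this way are pointers into STMT's operand tree, so
   rewriting them is immediately reflected in the IL; virtual operands live
   only in the cache.  */
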
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)

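/* Illustrative note (not in the original file): the get_* routines below OR
   these flags together.  For example, the LHS of a whole-variable store is
   scanned with (opf_is_def | opf_kill_def), while variables clobbered by a
   call or an asm are added with (opf_is_def | opf_non_specific); both
   combinations appear later in this file.  */
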
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the v_may_def operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the vuse operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the v_must_def operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree, tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}

/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}

/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);
  if (num < 2)
    return;
  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }
  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}

/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}

/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}

/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}

/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      /* Find the root element, making sure we skip any safe iterators.  */
      while (prev->use != NULL || prev->stmt == NULL)
	prev = prev->prev;

      /* Get the ssa_name of the list the node is in.  */
      root = prev->stmt;
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It's in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}

/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  Virtual uses do not need
   the overhead of correct_use_link since they cannot be directly manipulated
   like a real use can be.  (They don't exist in the TREE_OPERAND nodes.)  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

#define FINALIZE_OPBUILD build_defs
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
						build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
						build_defs, (I))
#define FINALIZE_FUNC		finalize_ssa_def_ops
#define FINALIZE_ALLOC		alloc_def
#define FINALIZE_FREE		free_defs
#define FINALIZE_TYPE		struct def_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->def_ptr)
#define FINALIZE_OPS		DEF_OPS
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"

/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

#define FINALIZE_OPBUILD build_uses
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
						build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
						build_uses, (I))
#define FINALIZE_FUNC		finalize_ssa_use_ops
#define FINALIZE_ALLOC		alloc_use
#define FINALIZE_FREE		free_uses
#define FINALIZE_TYPE		struct use_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->use_ptr.use)
#define FINALIZE_OPS		USE_OPS
#define FINALIZE_USE_PTR(PTR)	USE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE	correct_use_link
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
				(PTR)->use_ptr.use = (VAL);		\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       local variable).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}

/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */

#define FINALIZE_OPBUILD build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
						build_v_may_defs, (I)))
#define FINALIZE_FUNC		finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC		alloc_maydef
#define FINALIZE_FREE		free_maydefs
#define FINALIZE_TYPE		struct maydef_optype_d
#define FINALIZE_ELEM(PTR)	MAYDEF_RESULT (PTR)
#define FINALIZE_OPS		MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MAYDEF_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE	set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
				(PTR)->def_var = (VAL);			\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}

/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}

#define FINALIZE_OPBUILD build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
						build_vuses, (I)))
#define FINALIZE_FUNC		finalize_ssa_vuse_ops
#define FINALIZE_ALLOC		alloc_vuse
#define FINALIZE_FREE		free_vuses
#define FINALIZE_TYPE		struct vuse_optype_d
#define FINALIZE_ELEM(PTR)	VUSE_OP (PTR)
#define FINALIZE_OPS		VUSE_OPS
#define FINALIZE_USE_PTR(PTR)	VUSE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE	set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = 0;
	 vuse_index < VEC_length (tree, build_vuses);
	 vuse_index++)
      {
	tree t = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}

/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

#define FINALIZE_OPBUILD build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
						build_v_must_defs, (I)))
#define FINALIZE_FUNC		finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC		alloc_mustdef
#define FINALIZE_FREE		free_mustdefs
#define FINALIZE_TYPE		struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)	MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS		MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_CORRECT_USE	set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
				(PTR)->def_var = (VAL);			\
				(PTR)->kill_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->kill_var);\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     MUST-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}

/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}

/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
}

/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
}

/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
}

/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree)var);
}

/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
}

/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   cache for STMT, if it existed before.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_REF
	    && TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF
	    && TREE_CODE (lhs) != REALPART_EXPR
	    && TREE_CODE (lhs) != IMAGPART_EXPR)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}

/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  If ANN is NULL, this is not considered a
   "real" stmt, and none of the operands will be entered into their respective
   immediate uses tables.  This is to allow stmts to be processed when they
   are not actually in the CFG.

   Note that some fields in old_ops may change to NULL, although none of the
   memory they originally pointed to will be destroyed.  It is appropriate
   to call free_stmt_operands() on the value returned in old_ops.

   The rationale for this: Certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands don't change, new_ops will simply assume the pointer in old_ops,
   and the old_ops pointer will be set to NULL, indicating no memory needs to
   be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
          <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);				*/

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}

/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}

/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If get_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}

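/* Hypothetical usage sketch (not part of the original sources), following the
   comment above: a pass that rewrites a statement in place is expected to
   mark it modified and then refresh its operand cache.  NEW_RHS here is
   purely illustrative.

       TREE_OPERAND (stmt, 1) = new_rhs;
       mark_stmt_modified (stmt);
       update_stmt_operands (stmt);
*/
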
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the field to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store, this can
   be used to eliminate redundant loads.  OLD_OPS are the operands from the
   store stmt, and NEW_STMT is the new load which represents a load of the
   values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

/* Recursively scan the expression pointed to by EXPR_P in statement referred
   to by INFO.  FLAGS is one of the OPF_* constants modifying how to interpret
   the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);
	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	unsigned HOST_WIDE_INT offset, size;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars, or
	   whatever is the minimum correct subset.  */

	ref = okay_component_ref_for_subvars (expr, &offset, &size);
	if (ref)
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, size, sv, &exact))
		  {
		    int subvar_flags = flags;
		    if (!exact)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }
	  }
	else
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			     flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (TREE_CODE_CLASS (code) == tcc_comparison)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      swap_tree_operands (stmt,
				  &TREE_OPERAND (expr, 0),
				  &TREE_OPERAND (expr, 1));
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
}

/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  unsigned HOST_WIDE_INT offset, size;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (tag == NULL_TREE)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = okay_component_ref_for_subvars (tag, &offset, &size);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, size, sv, &exact))
	{
	  int subvar_flags = flags;
	  if (!exact)
	    subvar_flags &= ~opf_kill_def;
	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
	}
    }
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}

/* Add *VAR_P to the appropriate operand array for INFO.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR && s_ann)
    {
      add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_DEF, lest we run into
     validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~(opf_is_def | opf_kill_def);
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables or struct fields may get a
		     V_MUST_DEF operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
			      || v_ann->mem_tag_kind == STRUCT_FIELD);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}

/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* FIXME - if we have better information from the static vars
     analysis, we need to make the cache call site specific.  This way
     we can have the performance benefits even if we are doing good
     optimization.  */

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* If cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid
      && (!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
	{
	  t = VEC_index (tree, clobbered_vuses, u);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VEC_safe_push (tree, heap, build_vuses, (tree)t);
	}
      for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
	{
	  t = VEC_index (tree, clobbered_v_may_defs, u);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_v_may_def_list = 1;
	  VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
	}
      if (s_ann)
	{
	  s_ann->makes_aliased_loads = clobbered_aliased_loads;
	  s_ann->makes_aliased_stores = clobbered_aliased_stores;
	}
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      if (unmodifiable_var_p (var))
	add_stmt_operand (&var, &empty_ann, opf_none);
      else
	{
	  bool not_read
	    = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
	  bool not_written
	    = not_written_b ? bitmap_bit_p (not_written_b, u) : false;

	  if ((TREE_READONLY (var)
	       && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
	      || not_written)
	    {
	      if (!not_read)
		add_stmt_operand (&var, &empty_ann, opf_none);
	    }
	  else
	    add_stmt_operand (&var, &empty_ann, opf_is_def);
	}
    }

  if ((!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      clobbered_aliased_loads = empty_ann.makes_aliased_loads;
      clobbered_aliased_stores = empty_ann.makes_aliased_stores;

      /* Set the flags for a stmt's annotation.  */
      if (s_ann)
	{
	  s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
	  s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
	}

      /* Prepare empty cache vectors.  */
      VEC_truncate (tree, clobbered_vuses, 0);
      VEC_truncate (tree, clobbered_v_may_defs, 0);

      /* Now fill the clobbered cache with the values that have been found.  */
      for (u = 0; u < VEC_length (tree, build_vuses); u++)
	VEC_safe_push (tree, heap, clobbered_vuses,
		       VEC_index (tree, build_vuses, u));

      gcc_assert (VEC_length (tree, build_vuses)
		  == VEC_length (tree, clobbered_vuses));

      for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
	VEC_safe_push (tree, heap, clobbered_v_may_defs,
		       VEC_index (tree, build_v_may_defs, u));

      gcc_assert (VEC_length (tree, build_v_may_defs)
		  == VEC_length (tree, clobbered_v_may_defs));

      ssa_call_clobbered_cache_valid = true;
    }
}

/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
	{
	  t = VEC_index (tree, ro_call_vuses, u);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VEC_safe_push (tree, heap, build_vuses, (tree)t);
	}
      if (s_ann)
	s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare empty cache vectors.  */
  VEC_truncate (tree, ro_call_vuses, 0);

  /* Now fill the clobbered cache with the values that have been found.  */
  for (u = 0; u < VEC_length (tree, build_vuses); u++)
    VEC_safe_push (tree, heap, ro_call_vuses,
		   VEC_index (tree, build_vuses, u));

  gcc_assert (VEC_length (tree, build_vuses)
	      == VEC_length (tree, ro_call_vuses));

  ssa_ro_call_cache_valid = true;
}

/* Scan the immediate_use list for VAR, making sure it's linked properly.
   Return TRUE if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;
      /* Avoid infinite loops.  50,000,000 uses probably indicates a problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"