/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2025 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"
#include "gimple-fold.h"
#include "cgraph.h"
#include "tree-eh.h"
#include "value-range.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "gimple-walk.h"
#include "attribs.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.cc:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
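
/* For illustration (a sketch, not actual dump output), point 3 turns

     if (a) return;
     ...
     return;

   into something like

     if (a) goto <ret>;
     ...
     <ret>:
     return;

   with a single return site emitted at the end of the function.  */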

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
static void lower_builtin_assume_aligned (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter needs not be exact, but before lowering it will
	 most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();

  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return lower_function_body ();
  }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
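
/* For illustration (a sketch): a directive such as

     #pragma omp parallel
     { body }

   arrives here as a GIMPLE_OMP_PARALLEL with BODY attached; after this
   routine the lowered BODY follows the (now bodyless) directive
   statement in the same sequence.  */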

/* Create an artificial FUNCTION_DECL for assumption at LOC.  */

static tree
create_assumption_fn (location_t loc)
{
  tree name = clone_function_name_numbered (current_function_decl, "_assume");
  /* Temporarily, until we determine all the arguments.  */
  tree type = build_varargs_function_type_list (boolean_type_node, NULL_TREE);
  tree decl = build_decl (loc, FUNCTION_DECL, name, type);
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NAMELESS (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  tree attributes = DECL_ATTRIBUTES (current_function_decl);
  if (lookup_attribute ("noipa", attributes) == NULL)
    {
      attributes = tree_cons (get_identifier ("noipa"), NULL, attributes);
      if (lookup_attribute ("noinline", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);
      if (lookup_attribute ("noclone", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
      if (lookup_attribute ("no_icf", attributes) == NULL)
	attributes = tree_cons (get_identifier ("no_icf"), NULL, attributes);
    }
  DECL_ATTRIBUTES (decl) = attributes;
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  tree t = build_decl (DECL_SOURCE_LOCATION (decl),
		       RESULT_DECL, NULL_TREE, boolean_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;
  push_struct_function (decl);
  cfun->function_end_locus = loc;
  init_tree_ssa (cfun);
  return decl;
}

struct lower_assumption_data
{
  copy_body_data id;
  tree return_false_label;
  tree guard_copy;
  auto_vec<tree> decls;
};

/* Helper function for lower_assumptions.  Find local vars and labels
   in the assumption sequence and remove debug stmts.  */

static tree
find_assumption_locals_r (gimple_stmt_iterator *gsi_p, bool *,
			  struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      gcc_assert (SSA_NAME_VAR (lhs) == NULL_TREE);
      data->id.decl_map->put (lhs, NULL_TREE);
      data->decls.safe_push (lhs);
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var; var = DECL_CHAIN (var))
	if (VAR_P (var)
	    && !DECL_EXTERNAL (var)
	    && DECL_CONTEXT (var) == data->id.src_fn)
	  {
	    data->id.decl_map->put (var, var);
	    data->decls.safe_push (var);
	  }
      break;
    case GIMPLE_LABEL:
      {
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	data->id.decl_map->put (label, label);
      }
      break;
    case GIMPLE_RETURN:
      /* If something in assumption tries to return from parent function,
	 if it would be reached in hypothetical evaluation, it would be UB,
	 so transform such returns into return false;  */
      {
	gimple *g = gimple_build_assign (data->guard_copy, boolean_false_node);
	gsi_insert_before (gsi_p, g, GSI_SAME_STMT);
	gimple_return_set_retval (as_a <greturn *> (stmt), data->guard_copy);
      }
      break;
    case GIMPLE_DEBUG:
      /* As assumptions won't be emitted, debug info stmts in them
	 are useless.  */
      gsi_remove (gsi_p, true);
      wi->removed_stmt = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Create a new PARM_DECL that is identical in all respects to DECL except
   that DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL.  The
   original DECL must come from ID->src_fn and the copy will be part of
   ID->dst_fn.  */

static tree
assumption_copy_decl (tree decl, copy_body_data *id)
{
  tree type = TREE_TYPE (decl);

  if (is_global_var (decl))
    return decl;

  gcc_assert (VAR_P (decl)
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);
  if (TREE_THIS_VOLATILE (decl))
    type = build_pointer_type (type);
  tree copy = build_decl (DECL_SOURCE_LOCATION (decl),
			  PARM_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_THIS_VOLATILE (copy) = 0;
  if (TREE_THIS_VOLATILE (decl))
    TREE_READONLY (copy) = 1;
  else
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
      DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
    }
  DECL_ARG_TYPE (copy) = type;
  ((lower_assumption_data *) id)->decls.safe_push (decl);
  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Transform gotos out of the assumption into return false.  */

static tree
adjust_assumption_stmt_r (gimple_stmt_iterator *gsi_p, bool *,
			  struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lab = NULL_TREE;
  unsigned int idx = 0;
  if (gimple_code (stmt) == GIMPLE_GOTO)
    lab = gimple_goto_dest (stmt);
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
     repeat:
      if (idx == 0)
	lab = gimple_cond_true_label (as_a <gcond *> (stmt));
      else
	lab = gimple_cond_false_label (as_a <gcond *> (stmt));
    }
  else if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree label = gimple_label_label (as_a <glabel *> (stmt));
      DECL_CONTEXT (label) = current_function_decl;
    }
  if (lab)
    {
      if (!data->id.decl_map->get (lab))
	{
	  if (!data->return_false_label)
	    data->return_false_label
	      = create_artificial_label (UNKNOWN_LOCATION);
	  if (gimple_code (stmt) == GIMPLE_GOTO)
	    gimple_goto_set_dest (as_a <ggoto *> (stmt),
				  data->return_false_label);
	  else if (idx == 0)
	    gimple_cond_set_true_label (as_a <gcond *> (stmt),
					data->return_false_label);
	  else
	    gimple_cond_set_false_label (as_a <gcond *> (stmt),
					 data->return_false_label);
	}
      if (gimple_code (stmt) == GIMPLE_COND && idx == 0)
	{
	  idx = 1;
	  goto repeat;
	}
    }
  return NULL_TREE;
}

/* Adjust trees in the assumption body.  Called through walk_tree.  */

static tree
adjust_assumption_stmt_op (tree *tp, int *, void *datap)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) datap;
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  tree t = *tp;
  tree *newt;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      newt = data->id.decl_map->get (t);
      /* There shouldn't be SSA_NAMEs other than ones defined in the
	 assumption's body.  */
      gcc_assert (newt);
      *tp = *newt;
      break;
    case LABEL_DECL:
      newt = data->id.decl_map->get (t);
      if (newt)
	*tp = *newt;
      break;
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *tp = remap_decl (t, &data->id);
      if (TREE_THIS_VOLATILE (t) && *tp != t)
	{
	  *tp = build_simple_mem_ref (*tp);
	  TREE_THIS_NOTRAP (*tp) = 1;
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Lower assumption.
   The gimplifier transformed:
   .ASSUME (cond);
   into:
   [[assume (guard)]]
   {
     guard = cond;
   }
   which we should transform into:
   .ASSUME (&artificial_fn, args...);
   where artificial_fn will look like:
   bool artificial_fn (args...)
   {
     guard = false;
     ...
     return guard;
   }
   with any debug stmts in the block removed and jumps out of
   the block or return stmts replaced with return false;  */
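
/* A concrete sketch (names are illustrative, not actual compiler
   output): for

     void foo (int x) { [[assume (x > 0)]]; ... }

   the pass emits

     .ASSUME (&foo._assume.0, x);

   plus an artificial function roughly equivalent to

     bool foo._assume.0 (int x) { return x > 0; }

   which later passes may analyze to derive a range for x.  */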

static void
lower_assumption (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree guard = gimple_assume_guard (stmt);
  gimple *bind = gimple_assume_body (stmt);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_code (bind) == GIMPLE_BIND);

  lower_assumption_data lad;
  hash_map<tree, tree> decl_map;
  memset (&lad.id, 0, sizeof (lad.id));
  lad.return_false_label = NULL_TREE;
  lad.id.src_fn = current_function_decl;
  lad.id.dst_fn = create_assumption_fn (loc);
  lad.id.src_cfun = DECL_STRUCT_FUNCTION (lad.id.src_fn);
  lad.id.decl_map = &decl_map;
  lad.id.copy_decl = assumption_copy_decl;
  lad.id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  lad.id.transform_parameter = true;
  lad.id.do_not_unshare = true;
  lad.id.do_not_fold = true;
  cfun->curr_properties = lad.id.src_cfun->curr_properties;
  lad.guard_copy = create_tmp_var (boolean_type_node);
  decl_map.put (lad.guard_copy, lad.guard_copy);
  decl_map.put (guard, lad.guard_copy);
  cfun->assume_function = 1;

  /* Find variables, labels and SSA_NAMEs local to the assume GIMPLE_BIND.  */
  gimple_stmt_iterator gsi2 = gsi_start (*gimple_assume_body_ptr (stmt));
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, find_assumption_locals_r, NULL, &wi);
  unsigned int sz = lad.decls.length ();
  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      tree newv;
      /* SSA_NAMEs defined in the assume condition should be replaced
	 by new SSA_NAMEs in the artificial function.  */
      if (TREE_CODE (v) == SSA_NAME)
	{
	  newv = make_ssa_name (remap_type (TREE_TYPE (v), &lad.id));
	  decl_map.put (v, newv);
	}
      /* Local vars should have context and type adjusted to the
	 new artificial function.  */
      else
	{
	  if (is_global_var (v) && !DECL_ASSEMBLER_NAME_SET_P (v))
	    DECL_ASSEMBLER_NAME (v);
	  TREE_TYPE (v) = remap_type (TREE_TYPE (v), &lad.id);
	  DECL_CONTEXT (v) = current_function_decl;
	}
    }

  /* References to other automatic vars should be replaced by
     PARM_DECLs to the artificial function.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, adjust_assumption_stmt_r,
		    adjust_assumption_stmt_op, &wi);

  /* At the start prepend guard = false;  */
  gimple_seq body = NULL;
  gimple *g = gimple_build_assign (lad.guard_copy, boolean_false_node);
  gimple_seq_add_stmt (&body, g);
  gimple_seq_add_stmt (&body, bind);
  /* At the end add return guard;  */
  greturn *gr = gimple_build_return (lad.guard_copy);
  gimple_seq_add_stmt (&body, gr);
  /* If there were any jumps to labels outside of the condition,
     replace them with a jump to
     return_false_label:
     guard = false;
     return guard;  */
  if (lad.return_false_label)
    {
      g = gimple_build_label (lad.return_false_label);
      gimple_seq_add_stmt (&body, g);
      g = gimple_build_assign (lad.guard_copy, boolean_false_node);
      gimple_seq_add_stmt (&body, g);
      gr = gimple_build_return (lad.guard_copy);
      gimple_seq_add_stmt (&body, gr);
    }
  bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
  body = NULL;
  gimple_seq_add_stmt (&body, bind);
  gimple_set_body (current_function_decl, body);
  pop_cfun ();

  tree parms = NULL_TREE;
  tree parmt = void_list_node;
  auto_vec<tree, 8> vargs;
  vargs.safe_grow (1 + (lad.decls.length () - sz), true);
  /* First argument to IFN_ASSUME will be address of the
     artificial function.  */
  vargs[0] = build_fold_addr_expr (lad.id.dst_fn);
  for (unsigned i = lad.decls.length (); i > sz; --i)
    {
      tree *v = decl_map.get (lad.decls[i - 1]);
      gcc_assert (v && TREE_CODE (*v) == PARM_DECL);
      DECL_CHAIN (*v) = parms;
      parms = *v;
      parmt = tree_cons (NULL_TREE, TREE_TYPE (*v), parmt);
      /* Remaining arguments will be the variables/parameters
	 mentioned in the condition.  */
      vargs[i - sz] = lad.decls[i - 1];
      if (TREE_THIS_VOLATILE (lad.decls[i - 1]))
	{
	  TREE_ADDRESSABLE (lad.decls[i - 1]) = 1;
	  vargs[i - sz] = build_fold_addr_expr (lad.decls[i - 1]);
	}
      /* If they have gimple types, we might need to regimplify
	 them to make the IFN_ASSUME call valid.  */
      if (is_gimple_reg_type (TREE_TYPE (vargs[i - sz]))
	  && !is_gimple_val (vargs[i - sz]))
	{
	  tree t = make_ssa_name (TREE_TYPE (vargs[i - sz]));
	  g = gimple_build_assign (t, vargs[i - sz]);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  vargs[i - sz] = t;
	}
    }
  DECL_ARGUMENTS (lad.id.dst_fn) = parms;
  TREE_TYPE (lad.id.dst_fn) = build_function_type (boolean_type_node, parmt);

  cgraph_node::add_new_function (lad.id.dst_fn, false);

  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      if (TREE_CODE (v) == SSA_NAME)
	release_ssa_name (v);
    }

  data->cannot_fallthru = false;
  /* Replace GIMPLE_ASSUME statement with IFN_ASSUME call.  */
  gcall *call = gimple_build_call_internal_vec (IFN_ASSUME, vargs);
  gimple_set_location (call, loc);
  gsi_replace (gsi, call, true);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
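
/* For illustration (a sketch): in a sequence like

     return x;
     return x;

   the first return sets cannot_fallthru, so the second, unreachable
   return is simply removed rather than lowered into another goto to
   the common return site.  */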

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_OMP_STRUCTURED_BLOCK:
      /* These are supposed to be removed already in OMP lowering.  */
      gcc_unreachable ();

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_DISPATCH:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_ASSUME_ALIGNED
		     && !optimize)
	      {
		lower_builtin_assume_aligned (gsi);
		data->cannot_fallthru = false;
		gsi_next (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }

	if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	  {
	    tree base = gimple_call_arg (stmt, 1);
	    gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
	    tree decl = TREE_OPERAND (base, 0);
	    if (VAR_P (decl) && TREE_STATIC (decl))
	      {
		/* Don't poison a variable with static storage; it might have
		   gotten marked before gimplify_init_constructor promoted it
		   to static.  */
		gsi_remove (gsi, true);
		return;
	      }
	  }

	/* We delay folding of built calls from gimplification to
	   here so the IL is in consistent state for the diagnostic
	   machinery's job.  */
	if (gimple_call_builtin_p (stmt))
	  fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_ASSUME:
      lower_assumption (gsi, data);
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;
	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}

/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}
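
/* For illustration (a sketch): a sequence ending in "a = b;" may fall
   through, while one ending in "goto <bar>;" or in a call to a
   noreturn function such as abort () may not; an empty sequence
   conservatively reports true.  */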

/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}

/* Lower calls to __builtin_assume_aligned when not optimizing.  */

static void
lower_builtin_assume_aligned (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));

  tree lhs = gimple_call_lhs (call);
  if (!lhs || !POINTER_TYPE_P (TREE_TYPE (lhs)) || TREE_CODE (lhs) != SSA_NAME)
    return;

  tree align = gimple_call_arg (call, 1);
  tree misalign = (gimple_call_num_args (call) > 2
		   ? gimple_call_arg (call, 2) : NULL_TREE);
  if (!tree_fits_uhwi_p (align)
      || (misalign && !tree_fits_uhwi_p (misalign)))
    return;

  unsigned aligni = TREE_INT_CST_LOW (align);
  unsigned misaligni = misalign ? TREE_INT_CST_LOW (misalign) : 0;
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0
      || (misaligni & ~(aligni - 1)) != 0)
    return;

  /* For lowering we simply transfer alignment information to the
     result and leave the call otherwise unchanged, it will be elided
     at RTL expansion time.  */
  ptr_info_def *pi = get_ptr_info (lhs);
  set_ptr_info_alignment (pi, aligni, misaligni);
}
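
/* For illustration (a sketch): when not optimizing,

     p = __builtin_assume_aligned (q, 16);

   leaves the call in place but records alignment 16 (misalignment 0)
   in the SSA_NAME_PTR_INFO of p; the call itself is only elided at
   RTL expansion time.  */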

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contain also function/type/constant declarations
	 we don't need to care about.  */
      if (!VAR_P (var))
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}