/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2025 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "stor-layout.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */
enum bitint_prec_kind
{
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;
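/* As an illustration (assuming an x86_64-like target with a 64-bit
   limb and TImode as the widest supported integer mode): _BitInt(2)
   to _BitInt(64) are small, _BitInt(65) to _BitInt(128) are middle,
   _BitInt(129) to _BitInt(255) are large and _BitInt(256) and wider
   are huge, since huge_min_prec = 4 * limb_prec = 256 there.  */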
/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}
/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  ext = -1;
	  min_prec = min_prec2;
	}
    }
  return min_prec;
}
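/* E.g. for a signed _BitInt(256) constant -5 the wide_int is all ones
   except for the low bits 011, so min_precision (w, SIGNED) is 4; not
   counting the sign bit gives min_prec 3 and ext -1, i.e. everything
   above the low 3 bits is known to be all ones.  */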
/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

static tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
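/* E.g. assuming the 64-bit limb and TImode setup sketched above, a
   middle _BitInt(100) operand is NOPped here to a 100-bit nonstandard
   INTEGER_TYPE, so the arithmetic on it can be carried out in the
   wider supported integer mode and cast back afterwards.  */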
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

static bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

static int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return 0;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	  realpart = g;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	  cast = use_stmt;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
	 if both appear, the lowering wants to emit all the code
	 at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
	    return 0;
	  if (gsi_stmt (gsi) == stmt)
	    break;
	  /* If realpart is too far from stmt, punt as well.
	     Usually it will appear right after it.  */
	  if (++cnt == 32)
	    return 0;
	}
      while (1);
    }
  return seen == 3 ? 2 : 1;
}
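/* The gimple shape being matched is e.g. what __builtin_add_overflow
   (a, b, &r) on a large/huge _BitInt gimplifies to (SSA names below
   are illustrative):
     _1 = .ADD_OVERFLOW (a_2(D), b_3(D));
     _4 = REALPART_EXPR <_1>;	// optional; its presence makes this
     r = _4;			// function return 2
     _5 = IMAGPART_EXPR <_1>;
     _6 = (int) _5;
   with all of the statements in the same basic block.  */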
/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

static tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL),
      m_returns_twice_calls (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  tree build_bit_field_ref (tree, tree, unsigned HOST_WIDE_INT,
			    unsigned HOST_WIDE_INT);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_bit_field_ref (tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true; for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
  vec<gimple *> m_returns_twice_calls;
};
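/* A sketch of the m_first/m_data protocol described above: when
   lowering a constant operand inside a 2-limb-per-iteration loop, the
   m_first call might push say { cst_limb, ext_limb } and bump
   m_data_cnt by 2; every later call for the same operand then reads
   m_data[m_data_cnt] and m_data[m_data_cnt + 1] and bumps m_data_cnt
   by the same 2, so per-operand state stays consistently addressed
   across all limbs.  */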
bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
  m_returns_twice_calls.release ();
}
/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}
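/* E.g. with 64-bit limbs, for _BitInt(135) limbs 0 and 1 are accessed
   as the full m_limb_type (unsigned 64-bit), while limb 2 covers only
   135 % 64 == 7 bits and is accessed as a 7-bit nonstandard integer
   type with the signedness of TYPE.  */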
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ltype = m_limb_type;
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
  if (as != TYPE_ADDR_SPACE (ltype))
    ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				  | ENCODE_QUAL_ADDR_SPACE (as));
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, ltype,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, ltype, unshare_expr (TREE_OPERAND (var, 0)),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))), limb_prec);
	  tree atype = build_array_type_nelts (ltype, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
/* Build a BIT_FIELD_REF to access BITSIZE bits with FTYPE type at
   offset BITPOS inside of OBJ.  */

tree
bitint_large_huge::build_bit_field_ref (tree ftype, tree obj,
					unsigned HOST_WIDE_INT bitsize,
					unsigned HOST_WIDE_INT bitpos)
{
  if (INTEGRAL_TYPE_P (TREE_TYPE (obj))
      && !type_has_mode_precision_p (TREE_TYPE (obj)))
    {
      unsigned HOST_WIDE_INT nelts
	= CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))), limb_prec);
      tree ltype = m_limb_type;
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj));
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      tree atype = build_array_type_nelts (ltype, nelts);
      obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
    }
  return build3 (BIT_FIELD_REF, ftype, obj, bitsize_int (bitsize),
		 bitsize_int (bitpos));
}
/* Emit a half diamond,
   i.e. if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond,
   i.e. if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with full diamond in it,
   i.e. if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      else
	{
	  int p;
	  gimple *g;
	  tree t;
	  p = var_to_partition (m_map, op);
	  gcc_assert (m_vars[p] != NULL_TREE);
	  t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	  if (m_first
	      && m_single_use_names
	      && m_vars[p] != m_lhs
	      && m_after_stmt
	      && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	    {
	      tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					    CLOBBER_STORAGE_END);
	      g = gimple_build_assign (m_vars[p], clobber);
	      gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	      gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	    }
	  return t;
	}
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      {
	tree t;
	gimple *g;
	if (m_first
	    || (m_data[m_data_cnt] == NULL_TREE
		&& m_data[m_data_cnt + 1] == NULL_TREE))
	  {
	    unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	    unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	    int ext;
	    unsigned min_prec = bitint_min_cst_precision (op, ext);
	    if (m_first)
	      {
		m_data.safe_push (NULL_TREE);
		m_data.safe_push (NULL_TREE);
	      }
	    if (integer_zerop (op))
	      {
		tree c = build_zero_cst (m_limb_type);
		m_data[m_data_cnt] = c;
		m_data[m_data_cnt + 1] = c;
	      }
	    else if (integer_all_onesp (op))
	      {
		tree c = build_all_ones_cst (m_limb_type);
		m_data[m_data_cnt] = c;
		m_data[m_data_cnt + 1] = c;
	      }
	    else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	      {
		/* Single limb constant.  Use a phi with that limb from
		   the preheader edge and 0 or -1 constant from the other edge
		   and for the second limb in the loop.  */
		tree out;
		gcc_assert (m_first);
		m_data.pop ();
		m_data.pop ();
		prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				     build_int_cst (m_limb_type, ext));
	      }
	    else if (min_prec > prec - rem - 2 * limb_prec)
	      {
		/* Constant which has enough significant bits that it isn't
		   worth trying to save .rodata space by extending from smaller
		   number.  */
		tree type;
		if (m_var_msb)
		  type = TREE_TYPE (op);
		else
		  /* If we have a guarantee the most significant partial limb
		     (if any) will be only accessed through handle_operand
		     with INTEGER_CST idx, we don't need to include the partial
		     limb in .rodata.  */
		  type = build_bitint_type (prec - rem, 1);
		tree c = tree_output_constant_def (fold_convert (type, op));
		m_data[m_data_cnt] = c;
		m_data[m_data_cnt + 1] = NULL_TREE;
	      }
	    else if (m_upwards_2limb)
	      {
		/* Constant with smaller number of bits.  Trade conditional
		   code for .rodata space by extending from smaller number.  */
		min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
		tree type = build_bitint_type (min_prec, 1);
		tree c = tree_output_constant_def (fold_convert (type, op));
		tree idx2 = make_ssa_name (sizetype);
		g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
		insert_before (g);
		g = gimple_build_cond (LT_EXPR, idx,
				       size_int (min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
		edge edge_true, edge_false;
		if_then (g, (min_prec >= (prec - rem) / 2
			     ? profile_probability::likely ()
			     : profile_probability::unlikely ()),
			 edge_true, edge_false);
		tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
		insert_before (g);
		c1 = gimple_assign_lhs (g);
		tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
		insert_before (g);
		c2 = gimple_assign_lhs (g);
		tree c3 = build_int_cst (m_limb_type, ext);
		m_gsi = gsi_after_labels (edge_true->dest);
		m_data[m_data_cnt] = make_ssa_name (m_limb_type);
		m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
		gphi *phi = create_phi_node (m_data[m_data_cnt],
					     edge_true->dest);
		add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
		add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
		phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
		add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
		add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      }
	    else
	      {
		/* Constant with smaller number of bits.  Trade conditional
		   code for .rodata space by extending from smaller number.
		   Version for loops with random access to the limbs or
		   downwards.  */
		tree c;
		min_prec = CEIL (min_prec, limb_prec) * limb_prec;
		if (min_prec <= (unsigned) limb_prec)
		  c = fold_convert (m_limb_type, op);
		else
		  {
		    tree type = build_bitint_type (min_prec, 1);
		    c = tree_output_constant_def (fold_convert (type, op));
		  }
		m_data[m_data_cnt] = c;
		m_data[m_data_cnt + 1] = integer_type_node;
	      }
	    t = m_data[m_data_cnt];
	    if (m_data[m_data_cnt + 1] == NULL_TREE)
	      {
		t = limb_access (TREE_TYPE (op), t, idx, false);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
		insert_before (g);
		t = gimple_assign_lhs (g);
	      }
	  }
	else if (m_data[m_data_cnt + 1] == NULL_TREE)
	  {
	    t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	    g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	    insert_before (g);
	    t = gimple_assign_lhs (g);
	  }
	else
	  {
	    t = m_data[m_data_cnt + 1];
	    if (m_data[m_data_cnt + 1] == integer_type_node)
	      {
		unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
		unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
		int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
		tree c = m_data[m_data_cnt];
		unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
		g = gimple_build_cond (LT_EXPR, idx,
				       size_int (min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
		edge edge_true, edge_false;
		if_then (g, (min_prec >= (prec - rem) / 2
			     ? profile_probability::likely ()
			     : profile_probability::unlikely ()),
			 edge_true, edge_false);
		if (min_prec > (unsigned) limb_prec)
		  {
		    c = limb_access (TREE_TYPE (op), c, idx, false);
		    g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
		    insert_before (g);
		    c = gimple_assign_lhs (g);
		  }
		tree c2 = build_int_cst (m_limb_type, ext);
		m_gsi = gsi_after_labels (edge_true->dest);
		t = make_ssa_name (m_limb_type);
		gphi *phi = create_phi_node (t, edge_true->dest);
		add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
		add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	      }
	  }
	m_data_cnt += 2;
	return t;
      }
    default:
      gcc_unreachable ();
    }
}
/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
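/* E.g. the carry of a limb-wise addition is threaded through the loop
   this way: the preheader edge provides the initial 0, the PHI result
   is the carry coming into the current iteration and the SSA_NAME
   returned in *DATA_OUT is what the loop body sets to the outgoing
   carry for the latch edge.  */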
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
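/* Illustrative lowering of one limb of a += b on a target with the
   uaddc<mode>5 pattern:
     _1 = .UADDC (a_limb, b_limb, carry_in);
     sum = REALPART_EXPR <_1>;
     carry_out = IMAGPART_EXPR <_1>;
   Without the pattern, the same limb is computed by a pair of
   .ADD_OVERFLOW calls whose overflow bits are added together to form
   carry_out.  */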
/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision or if not m_upwards_2limb and lhs_type
	     has precision which is multiple of limb_prec that is
	     not the case.  */
	  || (!m_var_msb
	      && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		  == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
	      && ((!m_upwards_2limb
		   && (TYPE_PRECISION (lhs_type) % limb_prec != 0))
		  || (m_upwards_2limb
		      && (m_upwards_2limb * limb_prec
			  < TYPE_PRECISION (lhs_type))))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && low >= m_upwards_2limb - m_first)
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  tree idxc = idx;
	  if (!single_comparison
	      && m_upwards_2limb
	      && !m_first
	      && low + 1 == m_upwards_2limb)
	    /* In this case we know that idx <= low always,
	       so effectively we just needs a single comparison,
	       idx < low or idx == low, but we'd need to emit different
	       code for the 2 branches than single_comparison normally
	       emits.  So, instead of special-casing that, emit a
	       low <= low comparison which cfg cleanup will clean up
	       at the end of the pass.  */
	    idxc = size_int (low);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idxc, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  m_bitfld_load = 0;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		  m_data[m_bitfld_load + 2] = NULL_TREE;
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      if (!save_cast_conditional)
		{
		  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
		  insert_before (g);
		}
	      else
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = (e2 ? m_data[save_data_cnt + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!save_first && !save_cast_conditional)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      if (save_cast_conditional)
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = ((e2 && !save_first) ? m_data[m_bitfld_load + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}

      if (tree_to_uhwi (idx) < low)
	{
	  t = handle_operand (rhs1, idx);
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	}
      else if (tree_to_uhwi (idx) < high)
	{
	  t = handle_operand (rhs1, size_int (low));
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
	    t = add_cast (m_limb_type, t);
	  tree ext = NULL_TREE;
	  if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
	    {
	      ext = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node,
					 limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (ext));
	      g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	      insert_before (g);
	      ext = add_cast (m_limb_type, n);
	      m_data[save_data_cnt + 1] = ext;
	    }
	}
      else
	{
	  if (TYPE_UNSIGNED (rhs_type) && m_first)
	    {
	      handle_operand (rhs1, size_zero_node);
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else
	    m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	  if (TYPE_UNSIGNED (rhs_type))
	    t = build_zero_cst (m_limb_type);
	  else if (m_bb && m_data[save_data_cnt])
	    t = m_data[save_data_cnt];
	  else
	    t = m_data[save_data_cnt + 1];
	}
      tree type = limb_access_type (lhs_type, idx);
      if (!useless_type_conversion_p (type, m_limb_type))
	t = add_cast (type, t);
      m_first = save_first;
      return t;
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1, rext);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2, rext);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
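/* E.g. for a widening cast of signed _BitInt(128) x to _BitInt(256)
   with 64-bit limbs, low is 1 and high is 2: limb 0 is copied from x,
   limb 1 is the limb where the sign mask is computed once as
   (limb) ((signed limb) x_limb1 >> 63), and limbs 2 and 3 are filled
   from that mask (or from 0 when rhs_type is unsigned).  */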
/* Helper function for handle_stmt method, handle a BIT_FIELD_REF.  */

tree
bitint_large_huge::handle_bit_field_ref (tree op, tree idx)
{
  if (tree_fits_uhwi_p (idx))
    {
      if (m_first)
	m_data.safe_push (NULL);
      ++m_data_cnt;
      unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (m_limb_type));
      tree bfr = build3 (BIT_FIELD_REF, m_limb_type,
			 TREE_OPERAND (op, 0),
			 TYPE_SIZE (m_limb_type),
			 size_binop (PLUS_EXPR, TREE_OPERAND (op, 2),
				     bitsize_int (tree_to_uhwi (idx) * sz)));
      tree r = make_ssa_name (m_limb_type);
      gimple *g = gimple_build_assign (r, bfr);
      insert_before (g);
      tree type = limb_access_type (TREE_TYPE (op), idx);
      if (!useless_type_conversion_p (type, m_limb_type))
	r = add_cast (type, r);
      return r;
    }

  tree var;
  if (m_first)
    {
      unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op)));
      machine_mode mode;
      tree type, bfr;
      gimple *g;
      if (bitwise_mode_for_size (sz).exists (&mode)
	  && known_eq (GET_MODE_BITSIZE (mode), sz))
	type = bitwise_type_for_mode (mode);
      else
	{
	  mode = VOIDmode;
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op, 0)));
	}
      if (TYPE_ALIGN (type) < TYPE_ALIGN (TREE_TYPE (op)))
	type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op)));
      var = create_tmp_var (type);
      TREE_ADDRESSABLE (var) = 1;
      if (mode != VOIDmode)
	{
	  bfr = build3 (BIT_FIELD_REF, type, TREE_OPERAND (op, 0),
			TYPE_SIZE (type), TREE_OPERAND (op, 2));
	  g = gimple_build_assign (make_ssa_name (type),
				   BIT_FIELD_REF, bfr);
	  gimple_set_location (g, m_loc);
	  gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
	  bfr = gimple_assign_lhs (g);
	}
      else
	bfr = TREE_OPERAND (op, 0);
      g = gimple_build_assign (var, bfr);
      gimple_set_location (g, m_loc);
      gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
      if (mode == VOIDmode)
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op))), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build2 (MEM_REF, atype, build_fold_addr_expr (var),
			build_int_cst (build_pointer_type (type),
				       tree_to_uhwi (TREE_OPERAND (op, 2))
				       / BITS_PER_UNIT));
	}
      m_data.safe_push (var);
    }
  else
    var = unshare_expr (m_data[m_data_cnt]);
  ++m_data_cnt;
  var = limb_access (TREE_TYPE (op), var, idx, false);
  tree r = make_ssa_name (m_limb_type);
  gimple *g = gimple_build_assign (r, var);
  insert_before (g);
  return r;
}
/* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
   is an older EH edge, and except for virtual PHIs duplicate the
   PHI argument from the EH_EDGE to the new EH edge.  */

static void
add_eh_edge (basic_block src, edge eh_edge)
{
  edge e = make_edge (src, eh_edge->dest, EDGE_EH);
  e->probability = profile_probability::very_unlikely ();
  for (gphi_iterator gsi = gsi_start_phis (eh_edge->dest);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree lhs = gimple_phi_result (phi);
      if (virtual_operand_p (lhs))
	continue;
      const phi_arg_d *arg = gimple_phi_arg (phi, eh_edge->dest_idx);
      add_phi_arg (phi, arg->def, e, arg->locus);
    }
}
/* Helper function for handle_stmt method, handle a load from memory.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
  gimple *g;

  if (eh)
    {
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & EDGE_EH)
	  break;
    }
  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
	 which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
	goto normal_load;
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
	 handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	goto normal_load;
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
	  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
	bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
	{
	  bitoffset = 0;
	  var_field_off = true;
	}
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
			   TREE_OPERAND (rhs1, 0), repr,
			   var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      if (m_first)
	{
	  if (m_upwards)
	    {
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      tree t = limb_access (NULL_TREE, nrhs1, size_int (bo_idx), true);
	      tree iv = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (iv, t);
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_edge)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      add_eh_edge (e->src, eh_edge);
		      m_gsi = gsi_after_labels (e->dest);
		      if (gsi_bb (save_gsi) == e->src)
			{
			  if (gsi_end_p (save_gsi))
			    save_gsi = gsi_end_bb (e->dest);
			  else
			    save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
			}
		      if (m_preheader_bb == e->src)
			m_preheader_bb = e->dest;
		    }
		}
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	      tree out;
	      prepare_data_in_out (iv, idx, &out);
	      out = m_data[m_data_cnt];
	      m_data.safe_push (out);
	    }
	  else
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      tree nidx0 = NULL_TREE, nidx1;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
	{
	  gcc_assert (!m_bitfld_load);
	  m_bitfld_load = m_data_cnt;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  gcc_assert (i * limb_prec < prec);
	  nidx1 = size_int (i + bo_idx + 1);
	  if ((i + 1) * limb_prec > prec)
	    {
	      prec %= limb_prec;
	      if (prec + bo_bit <= (unsigned) limb_prec)
		nidx1 = NULL_TREE;
	    }
	  if (!iv)
	    nidx0 = size_int (i + bo_idx);
	}
      else
	{
	  if (!iv)
	    {
	      if (bo_idx == 0)
		nidx0 = idx;
	      else
		{
		  nidx0 = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  nidx1 = make_ssa_name (sizetype);
	  g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
				   size_int (bo_idx + 1));
	  insert_before (g);
	}

      tree iv2 = NULL_TREE;
      if (nidx0)
	{
	  tree t = limb_access (NULL_TREE, nrhs1, nidx0, true);
	  iv = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv, t);
	  insert_before (g);
	}
      if (nidx1)
	{
	  bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  if (conditional)
	    if ((prec % limb_prec) == 0
		|| ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
	      conditional = false;
	  edge edge_true = NULL, edge_false = NULL;
	  if (conditional)
	    {
	      g = gimple_build_cond (NE_EXPR, idx,
				     size_int (prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      if_then (g, profile_probability::likely (),
		       edge_true, edge_false);
	    }
	  tree t = limb_access (NULL_TREE, nrhs1, nidx1, true);
	  if (m_upwards_2limb
	      && !m_first
	      && !m_bitfld_load
	      && !tree_fits_uhwi_p (idx))
	    iv2 = m_data[m_data_cnt + 1];
	  else
	    iv2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv2, t);
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_edge)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, eh_edge);
		}
	    }
	  if (conditional)
	    {
	      tree iv3 = make_ssa_name (m_limb_type);
	      if (eh)
		edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
	      gphi *phi = create_phi_node (iv3, edge_true->dest);
	      add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, build_zero_cst (m_limb_type),
			   edge_false, UNKNOWN_LOCATION);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      iv2 = iv3;
	    }
	}
      g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
			       iv, build_int_cst (unsigned_type_node, bo_bit));
      insert_before (g);
      iv = gimple_assign_lhs (g);
      if (iv2)
	{
	  g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
				   iv2, build_int_cst (unsigned_type_node,
						       limb_prec - bo_bit));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
				   gimple_assign_lhs (g), iv);
	  insert_before (g);
	  iv = gimple_assign_lhs (g);
	  if (m_data[m_data_cnt])
	    m_data[m_data_cnt] = iv2;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
	    iv = add_cast (atype, iv);
	}
      m_data_cnt += 3;
      return iv;
    }

normal_load:
  /* Use write_p = true for loads with EH edges to make
     sure limb_access doesn't add a cast as separate
     statement after it.  */
  rhs1 = limb_access (rhs_type, rhs1, idx, eh);
  tree ret = make_ssa_name (TREE_TYPE (rhs1));
  g = gimple_build_assign (ret, rhs1);
  insert_before (g);
  if (eh)
    {
      maybe_duplicate_eh_stmt (g, stmt);
      if (eh_edge)
	{
	  edge e = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e->dest);
	  add_eh_edge (e->src, eh_edge);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
	    ret = add_cast (atype, ret);
	}
    }
  return ret;
}
/* Return a limb IDX from a mergeable statement STMT.  */
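/* For instance, assuming a 64-bit limb, lowering of
     c = a + b;		// _BitInt(511) a, b, c
   invokes handle_stmt for each limb index in turn; handle_stmt (stmt,
   size_int (2)) returns an SSA_NAME holding limb 2 of the sum, with the
   carry tracked through m_data by handle_plus_minus.  Illustrative
   sketch of the calling convention only.  */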
tree
bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
{
  tree lhs, rhs1, rhs2 = NULL_TREE;
  gimple *g;
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_load_p (stmt))
	return handle_load (stmt, idx);
      switch (gimple_assign_rhs_code (stmt))
	{
	case BIT_AND_EXPR:
	case BIT_IOR_EXPR:
	case BIT_XOR_EXPR:
	  rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
	  /* FALLTHRU */
	case BIT_NOT_EXPR:
	  rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  lhs = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
				   rhs1, rhs2);
	  insert_before (g);
	  return lhs;
	case PLUS_EXPR:
	case MINUS_EXPR:
	  rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
	  return handle_plus_minus (gimple_assign_rhs_code (stmt),
				    rhs1, rhs2, idx);
	case NEGATE_EXPR:
	  rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  rhs1 = build_zero_cst (TREE_TYPE (rhs2));
	  return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
	case LSHIFT_EXPR:
	  return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
						idx),
				gimple_assign_rhs2 (stmt), idx);
	case SSA_NAME:
	case INTEGER_CST:
	  return handle_operand (gimple_assign_rhs1 (stmt), idx);
	CASE_CONVERT:
	  return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt), idx);
	case VIEW_CONVERT_EXPR:
	  return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
			      TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
			      idx);
	case BIT_FIELD_REF:
	  return handle_bit_field_ref (gimple_assign_rhs1 (stmt), idx);
	default:
	  break;
	}
      break;
    default:
      break;
    }
  gcc_unreachable ();
}
/* Return minimum precision of OP at STMT.
   Positive value is minimum precision above which all bits
   are zero, negative means all bits above negation of the
   value are copies of the sign bit.  */
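/* E.g. when the range query proves OP is in [0, 0xff] the result is 8,
   for [-0x80, 0x7f] it is -8 (cf. the table in lower_addsub_overflow
   below).  Without usable range information an unsigned type yields its
   full precision and a signed one minus its precision.  Illustrative
   values only.  */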
static int
range_to_prec (tree op, gimple *stmt)
{
  int_range_max r;
  wide_int w;
  tree type = TREE_TYPE (op);
  unsigned int prec = TYPE_PRECISION (type);

  if (TREE_CODE (op) != SSA_NAME
      || !get_range_query (cfun)->range_of_expr (r, op, stmt)
      || r.undefined_p ())
    {
      if (TYPE_UNSIGNED (type))
	return prec;
      else
	return MIN ((int) -prec, -2);
    }

  if (!TYPE_UNSIGNED (TREE_TYPE (op)))
    {
      w = r.lower_bound ();
      if (wi::neg_p (w))
	{
	  int min_prec1 = wi::min_precision (w, SIGNED);
	  w = r.upper_bound ();
	  int min_prec2 = wi::min_precision (w, SIGNED);
	  int min_prec = MAX (min_prec1, min_prec2);
	  return MIN (-min_prec, -2);
	}
    }

  w = r.upper_bound ();
  int min_prec = wi::min_precision (w, UNSIGNED);
  return MAX (min_prec, 1);
}
/* Return address of the first limb of OP and write into *PREC
   its precision.  If positive, the operand is zero extended
   from that precision, if it is negative, the operand is sign-extended
   from -*PREC.  If PREC_STORED is NULL, it is the toplevel call,
   otherwise *PREC_STORED is prec from the innermost call without
   range optimizations.  */
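/* E.g. for INTEGER_CST 5 this returns the address of a single limb
   constant and sets *PREC to 3 (minimum unsigned precision of 5),
   while for -5 *PREC would be -4 (minimum signed precision), as
   computed by wi::min_precision below.  Illustrative values only.  */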
tree
bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
					int *prec_stored, int *prec)
{
  wide_int w;
  location_t loc_save = m_loc;
  if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
       || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
      && TREE_CODE (op) != INTEGER_CST)
    {
    do_int:
      *prec = range_to_prec (op, stmt);
      bitint_prec_kind kind = bitint_prec_small;
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
      if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
	kind = bitint_precision_kind (TREE_TYPE (op));
      if (kind == bitint_prec_middle)
	{
	  tree type = NULL_TREE;
	  op = maybe_cast_middle_bitint (&m_gsi, op, type);
	}
      tree op_type = TREE_TYPE (op);
      unsigned HOST_WIDE_INT nelts
	= CEIL (TYPE_PRECISION (op_type), limb_prec);
      /* Add support for 3 or more limbs filled in from normal
	 integral type if this assert fails.  If no target chooses
	 limb mode smaller than half of largest supported normal
	 integral type, this will not be needed.  */
      gcc_assert (nelts <= 2);
      if (prec_stored)
	*prec_stored = (TYPE_UNSIGNED (op_type)
			? TYPE_PRECISION (op_type)
			: -TYPE_PRECISION (op_type));
      if (*prec <= limb_prec && *prec >= -limb_prec)
	{
	  nelts = 1;
	  if (prec_stored)
	    {
	      if (TYPE_UNSIGNED (op_type))
		{
		  if (*prec_stored > limb_prec)
		    *prec_stored = limb_prec;
		}
	      else if (*prec_stored < -limb_prec)
		*prec_stored = -limb_prec;
	    }
	}
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      tree var = create_tmp_var (atype);
      tree t1 = op;
      if (!useless_type_conversion_p (m_limb_type, op_type))
	t1 = add_cast (m_limb_type, t1);
      tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
		       NULL_TREE, NULL_TREE);
      gimple *g = gimple_build_assign (v, t1);
      insert_before (g);
      if (nelts > 1)
	{
	  tree lp = build_int_cst (unsigned_type_node, limb_prec);
	  g = gimple_build_assign (make_ssa_name (op_type),
				   RSHIFT_EXPR, op, lp);
	  insert_before (g);
	  tree t2 = gimple_assign_lhs (g);
	  t2 = add_cast (m_limb_type, t2);
	  v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (v, t2);
	  insert_before (g);
	}
      tree ret = build_fold_addr_expr (var);
      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
	{
	  tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
	  g = gimple_build_assign (var, clobber);
	  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
	}
      m_loc = loc_save;
      return ret;
    }
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  gimple *g = SSA_NAME_DEF_STMT (op);
	  tree ret;
	  m_loc = gimple_location (g);
	  if (gimple_assign_load_p (g))
	    {
	      *prec = range_to_prec (op, NULL);
	      if (prec_stored)
		*prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
				? TYPE_PRECISION (TREE_TYPE (op))
				: -TYPE_PRECISION (TREE_TYPE (op)));
	      ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
	      ret = force_gimple_operand_gsi (&m_gsi, ret, true,
					      NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else if (gimple_code (g) == GIMPLE_NOP)
	    {
	      *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
	      if (prec_stored)
		*prec_stored = *prec;
	      tree var = create_tmp_var (m_limb_type);
	      TREE_ADDRESSABLE (var) = 1;
	      ret = build_fold_addr_expr (var);
	      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
		{
		  tree clobber = build_clobber (m_limb_type,
						CLOBBER_STORAGE_END);
		  g = gimple_build_assign (var, clobber);
		  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      gcc_assert (gimple_assign_cast_p (g));
	      tree rhs1 = gimple_assign_rhs1 (g);
	      bitint_prec_kind kind = bitint_prec_small;
	      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
		rhs1 = TREE_OPERAND (rhs1, 0);
	      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
	      if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
		kind = bitint_precision_kind (TREE_TYPE (rhs1));
	      if (kind >= bitint_prec_large)
		{
		  tree lhs_type = TREE_TYPE (op);
		  tree rhs_type = TREE_TYPE (rhs1);
		  int prec_stored_val = 0;
		  ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
		  if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
		    {
		      if (TYPE_UNSIGNED (lhs_type)
			  && !TYPE_UNSIGNED (rhs_type))
			gcc_assert (*prec >= 0 || prec_stored == NULL);
		    }
		  else
		    {
		      if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
			;
		      else if (TYPE_UNSIGNED (lhs_type))
			{
			  gcc_assert (*prec > 0
				      || prec_stored_val > 0
				      || (-prec_stored_val
					  >= TYPE_PRECISION (lhs_type)));
			  *prec = TYPE_PRECISION (lhs_type);
			}
		      else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
			;
		      else
			*prec = -TYPE_PRECISION (lhs_type);
		    }
		}
	      else
		{
		  op = rhs1;
		  stmt = g;
		  goto do_int;
		}
	    }
	  m_loc = loc_save;
	  return ret;
	}
      else
	{
	  int p = var_to_partition (m_map, op);
	  gcc_assert (m_vars[p] != NULL_TREE);
	  *prec = range_to_prec (op, stmt);
	  if (prec_stored)
	    *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
			    ? TYPE_PRECISION (TREE_TYPE (op))
			    : -TYPE_PRECISION (TREE_TYPE (op)));
	  return build_fold_addr_expr (m_vars[p]);
	}
    case INTEGER_CST:
      unsigned int min_prec, mp;
      tree type;
      w = wi::to_wide (op);
      if (tree_int_cst_sgn (op) >= 0)
	{
	  min_prec = wi::min_precision (w, UNSIGNED);
	  *prec = MAX (min_prec, 1);
	}
      else
	{
	  min_prec = wi::min_precision (w, SIGNED);
	  *prec = MIN ((int) -min_prec, -2);
	}
      mp = CEIL (min_prec, limb_prec) * limb_prec;
      if (mp == 0)
	mp = 1;
      if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
	  && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
	      || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
	type = TREE_TYPE (op);
      else
	type = build_bitint_type (mp, 1);
      if (TREE_CODE (type) != BITINT_TYPE
	  || bitint_precision_kind (type) == bitint_prec_small)
	{
	  if (TYPE_PRECISION (type) <= limb_prec)
	    type = m_limb_type;
	  else
	    {
	      while (bitint_precision_kind (mp) == bitint_prec_small)
		mp += limb_prec;
	      /* This case is for targets which e.g. have 64-bit
		 limb but categorize up to 128-bits _BitInts as
		 small.  We could use type of m_limb_type[2] and
		 similar instead to save space.  */
	      type = build_bitint_type (mp, 1);
	    }
	}
      if (prec_stored)
	{
	  if (tree_int_cst_sgn (op) >= 0)
	    *prec_stored = MAX (TYPE_PRECISION (type), 1);
	  else
	    *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
	}
      op = tree_output_constant_def (fold_convert (type, op));
      return build_fold_addr_expr (op);
    default:
      gcc_unreachable ();
    }
}
/* Helper function, create a loop before the current location,
   start with sizetype INIT value from the preheader edge.  Return
   a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
   from the latch edge.  */
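/* The emitted CFG has roughly this shape (sketch only):
     preheader:
       goto header;
     header:
       idx = PHI <INIT (preheader), *IDX_NEXT (latch)>
       ... body emitted by the caller at m_gsi ...
       if (cond)		// exit condition emitted by the caller
	 goto header;		// latch, EDGE_TRUE_VALUE
     exit:
   The caller is responsible for emitting the *IDX_NEXT increment and
   the closing condition.  */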
tree
bitint_large_huge::create_loop (tree init, tree *idx_next)
{
  if (!gsi_end_p (m_gsi))
    gsi_prev (&m_gsi);
  else
    m_gsi = gsi_last_bb (gsi_bb (m_gsi));
  edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
  e3->probability = profile_probability::very_unlikely ();
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = e3->probability.invert ();
  tree idx = make_ssa_name (sizetype);
  gphi *phi = create_phi_node (idx, e1->dest);
  add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
  *idx_next = make_ssa_name (sizetype);
  add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
  m_gsi = gsi_after_labels (e1->dest);
  m_bb = e1->dest;
  m_preheader_bb = e1->src;
  class loop *loop = alloc_loop ();
  loop->header = e1->dest;
  add_loop (loop, e1->src->loop_father);
  return idx;
}
/* Lower large/huge _BitInt statement mergeable or similar STMT which can be
   lowered using iteration from the least significant limb up to the most
   significant limb.  For large _BitInt it is emitted as straight line code
   before current location, for huge _BitInt as a loop handling two limbs
   at once, followed by handling up to limbs in straight line code (at most
   one full and one partial limb).  It can also handle EQ_EXPR/NE_EXPR
   comparisons, in that case CMP_CODE should be the comparison code and
   CMP_OP1/CMP_OP2 the comparison operands.  */
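/* E.g. with a 64-bit limb a mergeable operation on _BitInt(512) is
   emitted as a loop handling two limbs per iteration,
     for (idx = 0; idx != 8; idx += 2)
       ... handle limbs idx and idx + 1 ...
   while for _BitInt(577) the same loop would be followed by straight
   line code for the remaining full and partial limb.  Illustrative
   sketch assuming both precisions are bitint_prec_huge.  */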
tree
bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
					 tree cmp_op1, tree cmp_op2)
{
  bool eq_p = cmp_code != ERROR_MARK;
  tree type;
  if (eq_p)
    type = TREE_TYPE (cmp_op1);
  else
    type = TREE_TYPE (gimple_assign_lhs (stmt));
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  tree lhs = gimple_get_lhs (stmt);
  tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[p] != NULL_TREE);
      m_lhs = lhs = m_vars[p];
    }
  unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  bool sext = false;
  tree ext = NULL_TREE, store_operand = NULL_TREE;
  bool eh = false;
  basic_block eh_pad = NULL;
  tree nlhs = NULL_TREE;
  unsigned HOST_WIDE_INT bo_idx = 0;
  unsigned HOST_WIDE_INT bo_bit = 0;
  tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
  if (gimple_store_p (stmt))
    {
      store_operand = gimple_assign_rhs1 (stmt);
      eh = stmt_ends_bb_p (stmt);
      if (eh)
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		eh_pad = e->dest;
		break;
	      }
	}
      if (TREE_CODE (lhs) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	{
	  tree fld = TREE_OPERAND (lhs, 1);
	  gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
	  poly_int64 bitoffset;
	  poly_uint64 field_offset, repr_offset;
	  if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	    nlhs = lhs;
	  else
	    {
	      bool var_field_off = false;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		{
		  bitoffset = 0;
		  var_field_off = true;
		}
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
			     TREE_OPERAND (lhs, 0), repr,
			     var_field_off
			     ? TREE_OPERAND (lhs, 2) : NULL_TREE);
	      HOST_WIDE_INT bo = bitoffset.to_constant ();
	      bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
	      bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
	    }
	}
    }
  if ((store_operand
       && TREE_CODE (store_operand) == SSA_NAME
       && (m_names == NULL
	   || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
       && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
      || gimple_assign_cast_p (stmt))
    {
      rhs1 = gimple_assign_rhs1 (store_operand
				 ? SSA_NAME_DEF_STMT (store_operand)
				 : stmt);
      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
	rhs1 = TREE_OPERAND (rhs1, 0);
      /* Optimize mergeable ops ending with widening cast to _BitInt
	 (or followed by store).  We can lower just the limbs of the
	 cast operand and widen afterwards.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
	  && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	  && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
		    limb_prec) < CEIL (prec, limb_prec)
	      || (kind == bitint_prec_huge
		  && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
	{
	  store_operand = rhs1;
	  prec = TYPE_PRECISION (TREE_TYPE (rhs1));
	  kind = bitint_precision_kind (TREE_TYPE (rhs1));
	  if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	    sext = true;
	}
    }
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL (prec, limb_prec);
  else
    {
      rem = (prec % (2 * limb_prec));
      end = (prec - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec);
      idx = idx_first = create_loop (size_zero_node, &idx_next);
    }

  basic_block edge_bb = NULL;
  if (eq_p)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      if (kind == bitint_prec_large)
	m_gsi = gsi_end_bb (edge_bb);
    }
  else
    m_after_stmt = stmt;
  if (kind != bitint_prec_large)
    m_upwards_2limb = end;
  m_upwards = true;

  bool separate_ext
    = (prec != (unsigned) TYPE_PRECISION (type)
       && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
	   > CEIL (prec, limb_prec)));

  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (i);
      else if (i >= 2)
	idx = size_int (end + (i > 2));
      if (eq_p)
	{
	  rhs1 = handle_operand (cmp_op1, idx);
	  tree rhs2 = handle_operand (cmp_op2, idx);
	  g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e1 = split_block (gsi_bb (m_gsi), g);
	  e1->flags = EDGE_FALSE_VALUE;
	  edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	  e1->probability = profile_probability::unlikely ();
	  e2->probability = e1->probability.invert ();
	  if (i == 0)
	    set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	  m_gsi = gsi_after_labels (e1->dest);
	}
      else
	{
	  if (store_operand)
	    rhs1 = handle_operand (store_operand, idx);
	  else
	    rhs1 = handle_stmt (stmt, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (sext && i == cnt - 1)
	    ext = rhs1;
	  tree nidx = idx;
	  if (bo_idx)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  basic_block new_bb = NULL;
	  /* Handle stores into bit-fields.  */
	  if (bo_bit)
	    {
	      if (i == 0)
		{
		  edge e2 = NULL;
		  if (kind != bitint_prec_large)
		    {
		      prepare_data_in_out (build_zero_cst (m_limb_type),
					   idx, &bf_next);
		      bf_next = m_data.pop ();
		      bf_cur = m_data.pop ();
		      g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
					     NULL_TREE, NULL_TREE);
		      edge edge_true;
		      if_then_else (g, profile_probability::unlikely (),
				    edge_true, e2);
		      new_bb = e2->dest;
		    }
		  tree ftype
		    = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
		  tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
						  limb_prec - bo_bit,
						  bo_idx * limb_prec + bo_bit);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  insert_before (g);
		  if (eh)
		    {
		      maybe_duplicate_eh_stmt (g, stmt);
		      if (eh_pad)
			{
			  edge e = split_block (gsi_bb (m_gsi), g);
			  m_gsi = gsi_after_labels (e->dest);
			  add_eh_edge (e->src,
				       find_edge (gimple_bb (stmt), eh_pad));
			}
		    }
		  if (kind == bitint_prec_large)
		    {
		      bf_cur = rhs1;
		      done = true;
		    }
		  else if (e2)
		    m_gsi = gsi_after_labels (e2->src);
		}
	      if (!done)
		{
		  tree t1 = make_ssa_name (m_limb_type);
		  tree t2 = make_ssa_name (m_limb_type);
		  tree t3 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
					   build_int_cst (unsigned_type_node,
							  limb_prec - bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
					   build_int_cst (unsigned_type_node,
							  bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		  if (bf_next && i == 1)
		    {
		      g = gimple_build_assign (bf_next, bf_cur);
		      insert_before (g);
		    }
		  bf_cur = rhs1;
		  rhs1 = t3;
		}
	    }
	  /* Handle bit-field access to partial last limb if needed.  */
	  if (!done
	      && nlhs
	      && i == cnt - 1
	      && tree_fits_uhwi_p (idx))
	    {
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = (tprec - 1) % limb_prec + 1;
	      if (rprec + bo_bit < (unsigned) limb_prec)
		{
		  tree ftype
		    = build_nonstandard_integer_type (rprec + bo_bit, 1);
		  tree bfr
		    = build_bit_field_ref (ftype, unshare_expr (nlhs),
					   rprec + bo_bit,
					   (bo_idx + tprec / limb_prec)
					   * limb_prec);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  done = true;
		  bf_cur = NULL_TREE;
		}
	      else if (rprec + bo_bit == (unsigned) limb_prec)
		bf_cur = NULL_TREE;
	    }
	  /* Otherwise, stores to any other lhs.  */
	  if (!done)
	    {
	      tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
				    nlhs ? nlhs : lhs, nidx, true);
	      g = gimple_build_assign (l, rhs1);
	    }
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_pad)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src,
			       find_edge (gimple_bb (stmt), eh_pad));
		}
	    }
	  if (new_bb)
	    m_gsi = gsi_after_labels (new_bb);
	}
      m_first = false;
      if (kind == bitint_prec_huge && i <= 1)
	{
	  if (i == 0)
	    {
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
				       size_one_node);
	      insert_before (g);
	    }
	  else
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
				       size_int (2));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      if (eq_p)
		m_gsi = gsi_after_labels (edge_bb);
	      else
		m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }

  if (separate_ext)
    {
      if (sext)
	{
	  ext = add_cast (signed_type_for (m_limb_type), ext);
	  tree lpm1 = build_int_cst (unsigned_type_node,
				     limb_prec - 1);
	  tree n = make_ssa_name (TREE_TYPE (ext));
	  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	  insert_before (g);
	  ext = add_cast (m_limb_type, n);
	}
      else
	ext = build_zero_cst (m_limb_type);
      kind = bitint_precision_kind (type);
      unsigned start = CEIL (prec, limb_prec);
      prec = TYPE_PRECISION (type);
      idx = idx_first = idx_next = NULL_TREE;
      if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
	kind = bitint_prec_large;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec) - start;
      else
	{
	  rem = prec % limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = (bo_bit != 0) + 1 + (rem != 0);
	}
      for (unsigned i = 0; i < cnt; i++)
	{
	  if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
	    idx = size_int (start + i);
	  else if (i == cnt - 1 && (rem != 0))
	    idx = size_int (end);
	  else if (i == (bo_bit != 0))
	    idx = create_loop (size_int (start + i), &idx_next);
	  rhs1 = ext;
	  if (bf_cur != NULL_TREE && bf_cur != ext)
	    {
	      tree t1 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
				       build_int_cst (unsigned_type_node,
						      limb_prec - bo_bit));
	      insert_before (g);
	      if (integer_zerop (ext))
		rhs1 = t1;
	      else
		{
		  tree t2 = make_ssa_name (m_limb_type);
		  rhs1 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
					   build_int_cst (unsigned_type_node,
							  bo_bit));
		  insert_before (g);
		  g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		}
	      bf_cur = ext;
	    }
	  tree nidx = idx;
	  if (bo_idx)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (bo_idx));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  /* Handle bit-field access to partial last limb if needed.  */
	  if (nlhs && i == cnt - 1)
	    {
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = (tprec - 1) % limb_prec + 1;
	      if (rprec + bo_bit < (unsigned) limb_prec)
		{
		  tree ftype
		    = build_nonstandard_integer_type (rprec + bo_bit, 1);
		  tree bfr
		    = build_bit_field_ref (ftype, unshare_expr (nlhs),
					   rprec + bo_bit,
					   (bo_idx + tprec / limb_prec)
					   * limb_prec);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  done = true;
		  bf_cur = NULL_TREE;
		}
	      else if (rprec + bo_bit == (unsigned) limb_prec)
		bf_cur = NULL_TREE;
	    }
	  /* Otherwise, stores to any other lhs.  */
	  if (!done)
	    {
	      tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
				    nlhs ? nlhs : lhs, nidx, true);
	      g = gimple_build_assign (l, rhs1);
	    }
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_pad)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
		}
	    }
	  if (kind == bitint_prec_huge && i == (bo_bit != 0))
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }
  if (bf_cur != NULL_TREE)
    {
      unsigned int tprec = TYPE_PRECISION (type);
      unsigned int rprec = (tprec + bo_bit) % limb_prec;
      tree ftype = build_nonstandard_integer_type (rprec, 1);
      tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
				      rprec,
				      (bo_idx + (tprec + bo_bit) / limb_prec)
				      * limb_prec);
      rhs1 = bf_cur;
      if (bf_cur != ext)
	{
	  rhs1 = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
				   build_int_cst (unsigned_type_node,
						  limb_prec - bo_bit));
	  insert_before (g);
	}
      rhs1 = add_cast (ftype, rhs1);
      g = gimple_build_assign (bfr, rhs1);
      insert_before (g);
      if (eh)
	{
	  maybe_duplicate_eh_stmt (g, stmt);
	  if (eh_pad)
	    {
	      edge e = split_block (gsi_bb (m_gsi), g);
	      m_gsi = gsi_after_labels (e->dest);
	      add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
	    }
	}
    }

  if (gimple_store_p (stmt))
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (gimple_vdef (stmt));
      gsi_remove (&m_gsi, true);
    }
  if (eq_p)
    {
      lhs = make_ssa_name (boolean_type_node);
      basic_block bb = gimple_bb (stmt);
      gphi *phi = create_phi_node (lhs, bb);
      edge e = find_edge (gsi_bb (m_gsi), bb);
      unsigned int n = EDGE_COUNT (bb->preds);
      for (unsigned int i = 0; i < n; i++)
	{
	  edge e2 = EDGE_PRED (bb, i);
	  add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
		       e2, UNKNOWN_LOCATION);
	}
      cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      return lhs;
    }
  return NULL_TREE;
}
/* Handle a large/huge _BitInt comparison statement STMT other than
   EQ_EXPR/NE_EXPR.  CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
   lower_mergeable_stmt.  The {GT,GE,LT,LE}_EXPR comparisons are
   lowered by iteration from the most significant limb downwards to
   the least significant one, for large _BitInt in straight line code,
   otherwise with most significant limb handled in
   straight line code followed by a loop handling one limb at a time.
   Comparisons with unsigned huge _BitInt with precisions which are
   multiples of limb precision can use just the loop and don't need to
   handle most significant limb before the loop.  The loop or straight
   line code jumps to final basic block if a particular pair of limbs
   is not equal.  */
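/* E.g. a < b on unsigned _BitInt(192) with a 64-bit limb (the
   bitint_prec_large case) is lowered as
     if (a[2] > b[2]) goto res_false;
     if (a[2] < b[2]) goto res_true;
     if (a[1] > b[1]) goto res_false;
     if (a[1] < b[1]) goto res_true;
     ...
   with a final PHI collecting the result.  Illustrative sketch only;
   the precision categories are target dependent.  */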
tree
bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
					  tree cmp_op1, tree cmp_op2)
{
  tree type = TREE_TYPE (cmp_op1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  if (!TYPE_UNSIGNED (type)
      && integer_zerop (cmp_op2)
      && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
    {
      unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
      tree idx = size_int (end);
      m_data_cnt = 0;
      tree rhs1 = handle_operand (cmp_op1, idx);
      if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	}
      tree lhs = make_ssa_name (boolean_type_node);
      g = gimple_build_assign (lhs, cmp_code, rhs1,
			       build_zero_cst (TREE_TYPE (rhs1)));
      insert_before (g);
      cmp_code = NE_EXPR;
      return lhs;
    }

  unsigned cnt, rem = 0, end = 0;
  tree idx = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
  else
    {
      rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
      if (rem == 0 && !TYPE_UNSIGNED (type))
	rem = limb_prec;
      end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
      cnt = 1 + (rem != 0);
    }

  basic_block edge_bb = NULL;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_prev (&gsi);
  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
  edge_bb = e->src;
  m_gsi = gsi_end_bb (edge_bb);

  edge *edges = XALLOCAVEC (edge, cnt * 2);
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (cnt - i - 1);
      else if (i == cnt - 1)
	idx = create_loop (size_int (end - 1), &idx_next);
      else
	idx = size_int (end);
      tree rhs1 = handle_operand (cmp_op1, idx);
      tree rhs2 = handle_operand (cmp_op2, idx);
      if (i == 0
	  && !TYPE_UNSIGNED (type)
	  && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	  rhs2 = add_cast (stype, rhs2);
	}
      g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      edge e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::likely ();
      e2->probability = e1->probability.invert ();
      if (i == 0)
	set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i] = e2;
      g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::unlikely ();
      e2->probability = e1->probability.invert ();
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i + 1] = e2;
      m_first = false;
      if (kind == bitint_prec_huge && i == cnt - 1)
	{
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  insert_before (g);
	  g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gsi_bb (m_gsi),
					       &true_edge, &false_edge);
	  m_gsi = gsi_after_labels (false_edge->dest);
	  m_bb = NULL;
	}
    }

  tree lhs = make_ssa_name (boolean_type_node);
  basic_block bb = gimple_bb (stmt);
  gphi *phi = create_phi_node (lhs, bb);
  for (unsigned int i = 0; i < cnt * 2; i++)
    {
      tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
		  ^ (i & 1)) ? boolean_true_node : boolean_false_node;
      add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
    }
  add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
		    ? boolean_true_node : boolean_false_node,
	       find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
  cmp_code = NE_EXPR;
  return lhs;
}
/* Lower large/huge _BitInt left and right shift except for left
   shift by < limb_prec constant.  */
void
bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gimple *final_stmt = gsi_stmt (m_gsi);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type);
  tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
  gimple *g;
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  /* Preparation code common for both left and right shifts.
     unsigned n1 = n % limb_prec;
     size_t n2 = n / limb_prec;
     size_t n3 = n1 != 0;
     unsigned n4 = (limb_prec - n1) % limb_prec;
     (for power of 2 limb_prec n4 can be -n1 & (limb_prec)).  */
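  /* E.g. for limb_prec == 64 and n == 100: n1 == 36, n2 == 1, n3 == 1
     and n4 == 28, i.e. for a right shift each destination limb combines
     a source limb shifted right by 36 bits with the following limb
     shifted left by 28 bits.  Illustrative values only.  */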
  if (TREE_CODE (n) == INTEGER_CST)
    {
      tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
      n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
      n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
      n3 = size_int (!integer_zerop (n1));
      n4 = int_const_binop (TRUNC_MOD_EXPR,
			    int_const_binop (MINUS_EXPR, lp, n1), lp);
    }
  else
    {
      n1 = make_ssa_name (TREE_TYPE (n));
      n2 = make_ssa_name (sizetype);
      n3 = make_ssa_name (sizetype);
      n4 = make_ssa_name (TREE_TYPE (n));
      if (pow2p_hwi (limb_prec))
	{
	  tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
	  g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   RSHIFT_EXPR, n,
				   build_int_cst (TREE_TYPE (n),
						  exact_log2 (limb_prec)));
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   NEGATE_EXPR, n1);
	  insert_before (g);
	  g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
				   lpm1);
	  insert_before (g);
	}
      else
	{
	  tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
	  g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   TRUNC_DIV_EXPR, n, lp);
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   MINUS_EXPR, lp, n1);
	  insert_before (g);
	  g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
				   lp);
	  insert_before (g);
	}
      g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
			       build_zero_cst (TREE_TYPE (n)));
      insert_before (g);
      g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
      insert_before (g);
    }
  tree p = build_int_cst (sizetype,
			  prec / limb_prec - (prec % limb_prec == 0));
  if (rhs_code == RSHIFT_EXPR)
    {
      /* Lower
	   dst = src >> n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;

	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   int signed_p = (typeof (src) -1) < 0;
	   for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
				 ? p : p - n3); ++idx)
	     dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
	   limb_type ext;
	   if (prec % limb_prec == 0)
	     ext = src[p];
	   else if (signed_p)
	     ext = ((signed limb_type) (src[p] << (limb_prec
						   - (prec % limb_prec))))
		   >> (limb_prec - (prec % limb_prec));
	   else
	     ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
	   if (!signed_p && (prec % limb_prec == 0))
	     ;
	   else if (idx < prec / 64)
	     {
	       dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
	       ++idx;
	     }
	   idx -= n2;
	   if (signed_p)
	     {
	       dst[idx] = ((signed limb_type) ext) >> n1;
	       ext = ((signed limb_type) ext) >> (limb_prec - 1);
	     }
	   else
	     {
	       dst[idx] = ext >> n1;
	       ext = 0;
	     }
	   for (++idx; idx <= p; ++idx)
	     dst[idx] = ext;  */
      tree pmn3;
      if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	pmn3 = p;
      else if (TREE_CODE (n3) == INTEGER_CST)
	pmn3 = int_const_binop (MINUS_EXPR, p, n3);
      else
	{
	  pmn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
	  insert_before (g);
	}
      g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      tree idx = create_loop (n2, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxpn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idx);
      m_first = false;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       RSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxpn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      insert_before (g);
      g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
      insert_before (g);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      m_data_cnt = 0;
      tree ms = handle_operand (rhs1, p);
      tree ext = ms;
      if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
	ext = add_cast (m_limb_type, ms);
      if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	  && !integer_zerop (n3))
	{
	  g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idx);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, ext, n4);
	  insert_before (g);
	  tree t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  insert_before (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
	  insert_before (g);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
      insert_before (g);
      idx = gimple_assign_lhs (g);
      tree sext = ext;
      if (!TYPE_UNSIGNED (type))
	sext = add_cast (signed_type_for (m_limb_type), ext);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
			       RSHIFT_EXPR, sext, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!TYPE_UNSIGNED (type))
	{
	  t1 = add_cast (m_limb_type, t1);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
				   RSHIFT_EXPR, sext,
				   build_int_cst (TREE_TYPE (n),
						  limb_prec - 1));
	  insert_before (g);
	  ext = add_cast (m_limb_type, gimple_assign_lhs (g));
	}
      else
	ext = build_zero_cst (m_limb_type);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
			       size_one_node);
      insert_before (g);
      idx = gimple_assign_lhs (g);
      g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, ext);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      insert_before (g);
      g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
      insert_before (g);
    }
  else
    {
      /* Lower
	   dst = src << n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;

	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
	     dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
	   if (n1)
	     {
	       dst[idx] = src[idx - n2] << n1;
	       --idx;
	     }
	   for (; (ssize_t) idx >= 0; --idx)
	     dst[idx] = 0;  */
      tree n2pn3;
      if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
	n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
      else
	{
	  n2pn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
	  insert_before (g);
	}
      /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
	 idx even to access the most significant partial limb.  */
      m_var_msb = true;
      if (integer_zerop (n3))
	/* For n3 == 0 p >= n2 + n3 is always true for all valid shift
	   counts.  Emit if (true) condition that can be optimized later.  */
	g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
			       NULL_TREE, NULL_TREE);
      else
	g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      tree idx = create_loop (p, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxmn2mn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idxmn2);
      m_first = false;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxmn2mn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      insert_before (g);
      tree sn2pn3 = add_cast (ssizetype, n2pn3);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
			     NULL_TREE, NULL_TREE);
      insert_before (g);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      if (!integer_zerop (n3))
	{
	  g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  insert_before (g);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idxmn2);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idx, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  insert_before (g);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
			     ssize_int (0), NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, build_zero_cst (m_limb_type));
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      insert_before (g);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
			     ssize_int (0), NULL_TREE, NULL_TREE);
      insert_before (g);
    }
}
/* Lower large/huge _BitInt multiplication or division.  */
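/* Both are lowered into calls to libgcc routines through internal
   functions; e.g. with a 64-bit limb
     c = a * b;		// _BitInt(256) a, b, c
   becomes roughly
     .MULBITINT (&c, 256, &a, prec1, &b, prec2);
   where prec1 and prec2 are the minimum operand precisions computed by
   handle_operand_addr.  Illustrative sketch only.  */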
void
bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type), prec1, prec2;
  rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
  rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      lhs = build_fold_addr_expr (obj);
    }
  else
    {
      lhs = build_fold_addr_expr (obj);
      lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
				      NULL_TREE, true, GSI_SAME_STMT);
    }
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  switch (rhs_code)
    {
    case MULT_EXPR:
      g = gimple_build_call_internal (IFN_MULBITINT, 6,
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      insert_before (g);
      break;
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
				      lhs, build_int_cst (sitype, prec),
				      null_pointer_node,
				      build_int_cst (sitype, 0),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      insert_before (g);
      break;
    case TRUNC_MOD_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
				      build_int_cst (sitype, 0),
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      insert_before (g);
      break;
    default:
      gcc_unreachable ();
    }
  if (stmt_ends_bb_p (stmt))
    {
      maybe_duplicate_eh_stmt (g, stmt);
      edge e1;
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (e1, ei, bb->succs)
	if (e1->flags & EDGE_EH)
	  break;
      if (e1)
	{
	  edge e2 = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e2->dest);
	  add_eh_edge (e2->src, e1);
	}
    }
}
/* Lower large/huge _BitInt conversion to/from floating point.  */
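/* E.g. a FIX_TRUNC_EXPR from double to unsigned _BitInt(256) becomes
     .FLOATTOBITINT (&obj, 256, rhs1);
   (with negated precision for signed _BitInt), and the other direction
     lhs = .BITINTTOFLOAT (&rhs1, prec);
   Illustrative sketch of the calls emitted below.  */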
void
bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  if (rhs_code == FIX_TRUNC_EXPR)
    {
      int prec = TYPE_PRECISION (TREE_TYPE (lhs));
      if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
	prec = -prec;
      if (obj == NULL_TREE)
	{
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  obj = m_vars[part];
	  lhs = build_fold_addr_expr (obj);
	}
      else
	{
	  lhs = build_fold_addr_expr (obj);
	  lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
					  NULL_TREE, true, GSI_SAME_STMT);
	}
      scalar_mode from_mode
	= as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
      /* IEEE single is a full superset of both IEEE half and
	 bfloat formats, convert to float first and then to _BitInt
	 to avoid the need of another 2 library routines.  */
      if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
	   || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
	  && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
	{
	  tree type = lang_hooks.types.type_for_mode (SFmode, 0);
	  if (type)
	    rhs1 = add_cast (type, rhs1);
	}
      g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
				      lhs, build_int_cst (sitype, prec),
				      rhs1);
      insert_before (g);
    }
  else
    {
      int prec;
      rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
      g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
				      rhs1, build_int_cst (sitype, prec));
      gimple_call_set_lhs (g, lhs);
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      gsi_replace (&m_gsi, g, true);
    }
}
/* Helper method for lower_addsub_overflow and lower_mul_overflow.
   If check_zero is true, caller wants to check if all bits in [start, end)
   are zero, otherwise if bits in [start, end) are either all zero or
   all ones.  L is the limb with index LIMB, START and END are measured
   in bits.  */
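/* E.g. with limb_prec == 64, START == 190 and END == 256, for LIMB 2
   (bits [128, 192)) this returns L >> 62, i.e. just bits 190 and 191,
   and for LIMB 3 it returns L unchanged; when CHECK_ZERO is false
   arithmetic shifts are used instead so that all-ones patterns
   survive.  Illustrative values only.  */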
tree
bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
						unsigned int end, tree l,
						unsigned int limb,
						bool check_zero)
{
  unsigned startlimb = start / limb_prec;
  unsigned endlimb = (end - 1) / limb_prec;
  gimple *g;

  if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
    return l;
  if (startlimb == endlimb && limb == startlimb)
    {
      if (check_zero)
	{
	  wide_int w = wi::shifted_mask (start % limb_prec,
					 end - start, false, limb_prec);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_AND_EXPR, l,
				   wide_int_to_tree (m_limb_type, w));
	  insert_before (g);
	  return gimple_assign_lhs (g);
	}
      unsigned int shift = start % limb_prec;
      if ((end % limb_prec) != 0)
	{
	  unsigned int lshift = (-end) % limb_prec;
	  shift += lshift;
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, l,
				   build_int_cst (unsigned_type_node,
						  lshift));
	  insert_before (g);
	  l = gimple_assign_lhs (g);
	}
      l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      return add_cast (m_limb_type, gimple_assign_lhs (g));
    }
  else if (limb == startlimb)
    {
      if ((start % limb_prec) == 0)
	return l;
      if (!check_zero)
	l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node,
					      start % limb_prec));
      insert_before (g);
      l = gimple_assign_lhs (g);
      if (!check_zero)
	l = add_cast (m_limb_type, l);
      return l;
    }
  else if (limb == endlimb)
    {
      if ((end % limb_prec) == 0)
	return l;
      if (check_zero)
	{
	  wide_int w = wi::mask (end % limb_prec, false, limb_prec);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_AND_EXPR, l,
				   wide_int_to_tree (m_limb_type, w));
	  insert_before (g);
	  return gimple_assign_lhs (g);
	}
      unsigned int shift = (-end) % limb_prec;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       RSHIFT_EXPR, l,
			       build_int_cst (unsigned_type_node, shift));
      insert_before (g);
      return add_cast (m_limb_type, gimple_assign_lhs (g));
    }
  return l;
}
/* Helper method for lower_addsub_overflow and lower_mul_overflow.  Store
   result including overflow flag into the right locations.  */
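/* E.g. when the lhs of .ADD_OVERFLOW is a _Complex _BitInt(256)
   SSA_NAME with a backing variable, the result limbs are copied from
   VAR into the real part of OBJ, the overflow flag is stored as the
   first limb of the imaginary part and the remaining imaginary limbs
   are zeroed; without a backing variable the IMAGPART_EXPR uses of the
   lhs are rewritten to the computed flag directly.  Illustrative
   summary of the branches below.  */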
void
bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
					  tree ovf, tree lhs, tree orig_obj,
					  gimple *stmt, tree_code code)
{
  gimple *g;

  if (obj == NULL_TREE
      && (TREE_CODE (type) != BITINT_TYPE
	  || bitint_precision_kind (type) < bitint_prec_large))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
      tree lhs_type = type;
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) == bitint_prec_middle)
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
						   TYPE_UNSIGNED (type));
      tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
      g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
      insert_before (g);
      r1 = gimple_assign_lhs (g);
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
	  insert_before (g);
	  r2 = gimple_assign_lhs (g);
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != type)
	r1 = add_cast (type, r1);
      ovf = add_cast (lhs_type, ovf);
      if (lhs_type != type)
	ovf = add_cast (type, ovf);
      g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
      m_gsi = gsi_for_stmt (stmt);
      gsi_replace (&m_gsi, g, true);
    }
  else
    {
      unsigned HOST_WIDE_INT nelts = 0;
      tree atype = NULL_TREE;
      if (obj)
	{
	  nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
	  if (orig_obj == NULL_TREE)
	    nelts >>= 1;
	  atype = build_array_type_nelts (m_limb_type, nelts);
	}
      if (var && obj)
	{
	  tree v1, v2, zero;
	  if (orig_obj == NULL_TREE)
	    {
	      zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
	      v1 = build2 (MEM_REF, atype,
			   build_fold_addr_expr (unshare_expr (obj)), zero);
	    }
	  else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
	    v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
	  else
	    v1 = unshare_expr (obj);
	  zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
	  v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
	  g = gimple_build_assign (v1, v2);
	  insert_before (g);
	}
      if (orig_obj == NULL_TREE && obj)
	{
	  ovf = add_cast (m_limb_type, ovf);
	  tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
	  g = gimple_build_assign (l, ovf);
	  insert_before (g);
	  if (nelts > 1)
	    {
	      atype = build_array_type_nelts (m_limb_type, nelts - 1);
	      tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
					(nelts + 1) * m_limb_size);
	      tree v1 = build2 (MEM_REF, atype,
				build_fold_addr_expr (unshare_expr (obj)),
				off);
	      g = gimple_build_assign (v1, build_zero_cst (atype));
	      insert_before (g);
	    }
	}
      else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
	{
	  imm_use_iterator ui;
	  use_operand_p use_p;
	  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
	    {
	      g = USE_STMT (use_p);
	      if (!is_gimple_assign (g)
		  || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
		continue;
	      tree lhs2 = gimple_assign_lhs (g);
	      gimple *use_stmt;
	      single_imm_use (lhs2, &use_p, &use_stmt);
	      lhs2 = gimple_assign_lhs (use_stmt);
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
		g = gimple_build_assign (lhs2, ovf);
	      else
		g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
	      gsi_replace (&gsi, g, true);
	      if (gsi_stmt (m_gsi) == use_stmt)
		m_gsi = gsi_for_stmt (g);
	      break;
	    }
	}
      else if (ovf != boolean_false_node)
	{
	  g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
				 NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, profile_probability::very_unlikely (),
		   edge_true, edge_false);
	  tree zero = build_zero_cst (TREE_TYPE (lhs));
	  tree fn = ubsan_build_overflow_builtin (code, m_loc,
						  TREE_TYPE (lhs),
						  zero, zero, NULL);
	  force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
				    true, GSI_SAME_STMT);
	  m_gsi = gsi_after_labels (edge_true->dest);
	}
    }
  if (var)
    {
      tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
      g = gimple_build_assign (var, clobber);
      gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
    }
}
/* Helper function for lower_addsub_overflow and lower_mul_overflow.
   Given precisions of result TYPE (PREC), argument 0 precision PREC0,
   argument 1 precision PREC1 and minimum precision for the result
   PREC2, compute *START, *END, *CHECK_ZERO and return OVF.  */
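/* E.g. for an unsigned result TYPE with PREC == 256 and PREC2 == 257
   (unsigned addition which can carry into bit 256) this sets
   *START = 256, *END = 257 and *CHECK_ZERO = true: overflow happened
   iff bit 256 of the infinite precision result is non-zero.
   Illustrative values only.  */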
static tree
arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
		int prec2, unsigned *start, unsigned *end, bool *check_zero)
{
  *start = 0;
  *end = 0;
  *check_zero = true;
  /* Ignore this special rule for subtraction, even if both
     prec0 >= 0 and prec1 >= 0, their subtraction can be negative
     in infinite precision.  */
  if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
    {
      /* Result in [0, prec2) is unsigned, if prec > prec2,
	 all bits above it will be zero.  */
      if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
	return boolean_false_node;
      else
	{
	  /* ovf if any of bits in [start, end) is non-zero.  */
	  *start = prec - !TYPE_UNSIGNED (type);
	  *end = prec2;
	}
    }
  else if (TYPE_UNSIGNED (type))
    {
      /* If result in [0, prec2) is signed and if prec > prec2,
	 all bits above it will be sign bit copies.  */
      if (prec >= prec2)
	{
	  /* ovf if bit prec - 1 is non-zero.  */
	  *start = prec - 1;
	  *end = prec;
	}
      else
	{
	  /* ovf if any of bits in [start, end) is non-zero.  */
	  *start = prec;
	  *end = prec2;
	}
    }
  else if (prec >= prec2)
    return boolean_false_node;
  else
    {
      /* ovf if [start, end) bits aren't all zeros or all ones.  */
      *start = prec - 1;
      *end = prec2;
      *check_zero = false;
    }
  return NULL_TREE;
}
/* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */

void
bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }

  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type);
  int prec0 = range_to_prec (arg0, stmt);
  int prec1 = range_to_prec (arg1, stmt);
  /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
     the minimum unsigned precision of any possible operation's
     result, otherwise it is minimum signed precision.
     If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
     if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
     if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
     if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
     PREC0  CODE  PREC1  RESULT           PREC2  SIGNED vs. UNSIGNED
      8      +     8     [0, 0x1fe]        9     UNSIGNED
      8      +     10    [0, 0x4fe]        11    UNSIGNED
     -8      +    -8     [-0x100, 0xfe]    9     SIGNED
     -8      +    -10    [-0x280, 0x27e]   11    SIGNED
      8      +    -8     [-0x80, 0x17e]    10    SIGNED
      8      +    -10    [-0x200, 0x2fe]   11    SIGNED
      10     +    -8     [-0x80, 0x47e]    12    SIGNED
      8      -     8     [-0xff, 0xff]     9     SIGNED
      8      -     10    [-0x3ff, 0xff]    11    SIGNED
      10     -     8     [-0xff, 0x3ff]    11    SIGNED
     -8      -    -8     [-0xff, 0xff]     9     SIGNED
     -8      -    -10    [-0x27f, 0x27f]   11    SIGNED
     -10     -    -8     [-0x27f, 0x27f]   11    SIGNED
      8      -    -8     [-0x7f, 0x17f]    10    SIGNED
      8      -    -10    [-0x1ff, 0x2ff]   11    SIGNED
      10     -    -8     [-0x7f, 0x47f]    12    SIGNED
     -8      -     8     [-0x17f, 0x7f]    10    SIGNED
     -8      -     10    [-0x47f, 0x7f]    12    SIGNED
     -10     -     8     [-0x2ff, 0x1ff]   11    SIGNED  */
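  /* Worked example (illustrative): for PREC0 8 and PREC1 -10 the first
     statement below computes prec2 = MAX (8, 10) = 10; the operands
     differ in signedness and the signed one is the wider
     (prec2 == -prec1 && prec2 != prec0), so just one extra bit is
     needed and PREC2 becomes 11, matching the "8 + -10" row above.  */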
  int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  /* If operands are either both signed or both unsigned,
     we need just one additional bit.  */
  prec2 = (((prec0 < 0) == (prec1 < 0)
	    /* If one operand is signed and one unsigned and
	       the signed one has larger precision, we need
	       just one extra bit, otherwise two.  */
	    || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
		: (prec2 == -prec1 && prec2 != prec0)))
	   ? prec2 + 1 : prec2 + 2);
  int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  prec3 = MAX (prec3, prec);
  tree var = NULL_TREE;
  tree orig_obj = obj;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    {
      unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }

  enum tree_code code;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_UBSAN_CHECK_ADD:
      code = PLUS_EXPR;
      break;
    case IFN_SUB_OVERFLOW:
    case IFN_UBSAN_CHECK_SUB:
      code = MINUS_EXPR;
      break;
    default:
      gcc_unreachable ();
    }
  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);

  unsigned startlimb, endlimb;
  if (ovf)
    {
      startlimb = ~0U;
      endlimb = ~0U;
    }
  else
    {
      startlimb = start / limb_prec;
      endlimb = (end - 1) / limb_prec;
    }

  int prec4 = ovf != NULL_TREE ? prec : prec3;
  bitint_prec_kind kind = bitint_precision_kind (prec4);
  unsigned cnt, rem = 0, fin = 0;
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  bool last_ovf = (ovf == NULL_TREE
		   && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
  if (kind != bitint_prec_huge)
    cnt = CEIL (prec4, limb_prec) + last_ovf;
  else
    {
      rem = (prec4 % (2 * limb_prec));
      fin = (prec4 - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
      idx = idx_first = create_loop (size_zero_node, &idx_next);
    }

  if (kind == bitint_prec_huge)
    m_upwards_2limb = fin;
  m_upwards = true;

  tree type0 = TREE_TYPE (arg0);
  tree type1 = TREE_TYPE (arg1);
  int prec5 = prec3;
  if (bitint_precision_kind (prec5) < bitint_prec_large)
    prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
  if (TYPE_PRECISION (type0) < prec5)
    {
      type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
      if (TREE_CODE (arg0) == INTEGER_CST)
	arg0 = fold_convert (type0, arg0);
    }
  if (TYPE_PRECISION (type1) < prec5)
    {
      type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
      if (TREE_CODE (arg1) == INTEGER_CST)
	arg1 = fold_convert (type1, arg1);
    }
  unsigned int data_cnt = 0;
  tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
  tree cmp = build_zero_cst (m_limb_type);
  unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
  tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      tree rhs1, rhs2;
      if (kind != bitint_prec_huge)
	idx = size_int (i);
      else if (i >= 2)
	idx = size_int (fin + i - 2);
      if (!last_ovf || i < cnt - 1)
	{
	  if (type0 != TREE_TYPE (arg0))
	    rhs1 = handle_cast (type0, arg0, idx);
	  else
	    rhs1 = handle_operand (arg0, idx);
	  if (type1 != TREE_TYPE (arg1))
	    rhs2 = handle_cast (type1, arg1, idx);
	  else
	    rhs2 = handle_operand (arg1, idx);
	  if (i == 0)
	    data_cnt = m_data_cnt;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
	    rhs2 = add_cast (m_limb_type, rhs2);
	  last_rhs1 = rhs1;
	  last_rhs2 = rhs2;
	}
      else
	{
	  m_data_cnt = data_cnt;
	  if (TYPE_UNSIGNED (type0) || prec0 >= 0)
	    rhs1 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
	      if (TREE_CODE (rhs1) == INTEGER_CST)
		rhs1 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   RSHIFT_EXPR, rhs1, lpm1);
		  insert_before (g);
		  rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	  if (TYPE_UNSIGNED (type1) || prec1 >= 0)
	    rhs2 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
	      if (TREE_CODE (rhs2) == INTEGER_CST)
		rhs2 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
					   RSHIFT_EXPR, rhs2, lpm1);
		  insert_before (g);
		  rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	}
      tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
      if (ovf != boolean_false_node)
	{
	  if (tree_fits_uhwi_p (idx))
	    {
	      unsigned limb = tree_to_uhwi (idx);
	      if (limb >= startlimb && limb <= endlimb)
		{
		  tree l = arith_overflow_extract_bits (start, end, rhs,
							limb, check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (ovf == NULL_TREE && !check_zero)
		    {
		      cmp = l;
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (ovf == NULL_TREE)
		    ovf = this_ovf;
		  else
		    {
		      tree b = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      ovf = b;
		    }
		}
	    }
	  else if (startlimb < fin)
	    {
	      if (m_first && startlimb + 2 < fin)
		{
		  tree data_out;
		  ovf = prepare_data_in_out (boolean_false_node, idx,
					     &data_out);
		  ovf_out = m_data.pop ();
		  m_data.pop ();
		  if (!check_zero)
		    {
		      cmp = prepare_data_in_out (cmp, idx, &data_out);
		      cmp_out = m_data.pop ();
		      m_data.pop ();
		    }
		}
	      if (i != 0 || startlimb != fin - 1)
		{
		  tree_code cmp_code;
		  bool single_comparison
		    = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
		  if (!single_comparison)
		    cmp_code = GE_EXPR;
		  else if ((startlimb & 1) == (i & 1))
		    cmp_code = EQ_EXPR;
		  else
		    cmp_code = GT_EXPR;
		  g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
					 NULL_TREE, NULL_TREE);
		  edge edge_true_true, edge_true_false, edge_false;
		  gimple *g2 = NULL;
		  if (!single_comparison)
		    g2 = gimple_build_cond (NE_EXPR, idx,
					    size_int (startlimb), NULL_TREE,
					    NULL_TREE);
		  if_then_if_then_else (g, g2, profile_probability::likely (),
					profile_probability::likely (),
					edge_true_true, edge_true_false,
					edge_false);
		  unsigned tidx = startlimb + (cmp_code == GT_EXPR);
		  tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
							check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (cmp_code != GT_EXPR && !check_zero)
		    {
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (cmp_code == GT_EXPR)
		    {
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      this_ovf = t;
		    }
		  tree this_ovf2 = NULL_TREE;
		  if (!single_comparison)
		    {
		      m_gsi = gsi_after_labels (edge_true_true->src);
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
		      insert_before (g);
		      this_ovf2 = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
					       ovf, t);
		      insert_before (g);
		    }
		  m_gsi = gsi_after_labels (edge_true_false->dest);
		  tree t;
		  if (i == 1 && ovf_out)
		    t = ovf_out;
		  else
		    t = make_ssa_name (boolean_type_node);
		  gphi *phi = create_phi_node (t, edge_true_false->dest);
		  add_phi_arg (phi, this_ovf, edge_true_false,
			       UNKNOWN_LOCATION);
		  add_phi_arg (phi, ovf ? ovf
			       : boolean_false_node, edge_false,
			       UNKNOWN_LOCATION);
		  if (edge_true_true)
		    add_phi_arg (phi, this_ovf2, edge_true_true,
				 UNKNOWN_LOCATION);
		  ovf = t;
		  if (!check_zero && cmp_code != GT_EXPR)
		    {
		      t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
		      phi = create_phi_node (t, edge_true_false->dest);
		      add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
		      add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
		      if (edge_true_true)
			add_phi_arg (phi, cmp, edge_true_true,
				     UNKNOWN_LOCATION);
		      cmp = t;
		    }
		}
	    }
	}

      if (var || obj)
	{
	  if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
	    ;
	  else if (!tree_fits_uhwi_p (idx)
		   && (unsigned) prec < (fin - (i == 0)) * limb_prec)
	    {
	      bool single_comparison
		= (((unsigned) prec % limb_prec) == 0
		   || prec_limbs + 1 >= fin
		   || (prec_limbs & 1) == (i & 1));
	      g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
				     NULL_TREE, NULL_TREE);
	      gimple *g2 = NULL;
	      if (!single_comparison)
		g2 = gimple_build_cond (EQ_EXPR, idx,
					size_int (prec_limbs - 1),
					NULL_TREE, NULL_TREE);
	      edge edge_true_true, edge_true_false, edge_false;
	      if_then_if_then_else (g, g2, profile_probability::likely (),
				    profile_probability::unlikely (),
				    edge_true_true, edge_true_false,
				    edge_false);
	      tree l = limb_access (type, var ? var : obj, idx, true);
	      g = gimple_build_assign (l, rhs);
	      insert_before (g);
	      if (!single_comparison)
		{
		  m_gsi = gsi_after_labels (edge_true_true->src);
		  tree plm1idx = size_int (prec_limbs - 1);
		  tree plm1type = limb_access_type (type, plm1idx);
		  l = limb_access (type, var ? var : obj, plm1idx, true);
		  if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
		    rhs = add_cast (plm1type, rhs);
		  if (!useless_type_conversion_p (TREE_TYPE (l),
						  TREE_TYPE (rhs)))
		    rhs = add_cast (TREE_TYPE (l), rhs);
		  g = gimple_build_assign (l, rhs);
		  insert_before (g);
		}
	      m_gsi = gsi_after_labels (edge_true_false->dest);
	    }
	  else
	    {
	      tree l = limb_access (type, var ? var : obj, idx, true);
	      if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
		rhs = add_cast (TREE_TYPE (l), rhs);
	      g = gimple_build_assign (l, rhs);
	      insert_before (g);
	    }
	}
      m_first = false;
      if (kind == bitint_prec_huge && i <= 1)
	{
	  if (i == 0)
	    {
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
				       size_one_node);
	      insert_before (g);
	    }
	  else
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
				       size_int (2));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      m_gsi = gsi_for_stmt (final_stmt);
	      m_bb = NULL;
	    }
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
}

/* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */
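
/* E.g. (illustrative user code, not from the sources) a construct like

     unsigned _BitInt(512) a, b, r;
     bool ovf = __builtin_mul_overflow (a, b, &r);

   arrives here as a .MUL_OVERFLOW internal call; the full product is
   computed into a limb array by the IFN_MULBITINT libgcc helper and
   the overflow flag is then derived below from the bits above the
   result's precision.  */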

void
bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type), prec0, prec1;
  arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
  arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
  int prec2 = ((prec0 < 0 ? -prec0 : prec0)
	       + (prec1 < 0 ? -prec1 : prec1));
  if (prec0 == 1 || prec1 == 1)
    --prec2;
  tree var = NULL_TREE;
  tree orig_obj = obj;
  bool force_var = false;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  else if (obj != NULL_TREE && DECL_P (obj))
    {
      for (int i = 0; i < 2; ++i)
	{
	  tree arg = i ? arg1 : arg0;
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    arg = TREE_OPERAND (arg, 0);
	  if (get_base_address (arg) == obj)
	    {
	      force_var = true;
	      break;
	    }
	}
    }
  if (obj == NULL_TREE
      || force_var
      || TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large
      || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
    {
      unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }
  tree addr = build_fold_addr_expr (var ? var : obj);
  addr = force_gimple_operand_gsi (&m_gsi, addr, true,
				   NULL_TREE, true, GSI_SAME_STMT);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g
    = gimple_build_call_internal (IFN_MULBITINT, 6,
				  addr, build_int_cst (sitype,
						       MAX (prec2, prec)),
				  arg0, build_int_cst (sitype, prec0),
				  arg1, build_int_cst (sitype, prec1));
  insert_before (g);

  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);
  if (ovf == NULL_TREE)
    {
      unsigned startlimb = start / limb_prec;
      unsigned endlimb = (end - 1) / limb_prec;
      unsigned cnt;
      bool use_loop = false;
      if (startlimb == endlimb)
	cnt = 1;
      else if (startlimb + 1 == endlimb)
	cnt = 2;
      else if ((end % limb_prec) == 0)
	{
	  cnt = 2;
	  use_loop = true;
	}
      else
	{
	  cnt = 3;
	  use_loop = startlimb + 2 < endlimb;
	}
      if (cnt == 1)
	{
	  tree l = limb_access (NULL_TREE, var ? var : obj,
				size_int (startlimb), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	  insert_before (g);
	  l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
					   startlimb, check_zero);
	  ovf = make_ssa_name (boolean_type_node);
	  if (check_zero)
	    g = gimple_build_assign (ovf, NE_EXPR, l,
				     build_zero_cst (m_limb_type));
	  else
	    {
	      g = gimple_build_assign (make_ssa_name (m_limb_type),
				       PLUS_EXPR, l,
				       build_int_cst (m_limb_type, 1));
	      insert_before (g);
	      g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
				       build_int_cst (m_limb_type, 1));
	    }
	  insert_before (g);
	}
      else
	{
	  basic_block edge_bb = NULL;
	  gimple_stmt_iterator gsi = m_gsi;
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);

	  tree cmp = build_zero_cst (m_limb_type);
	  for (unsigned i = 0; i < cnt; i++)
	    {
	      tree idx, idx_next = NULL_TREE;
	      if (i == 0)
		idx = size_int (startlimb);
	      else if (i == 2)
		idx = size_int (endlimb);
	      else if (use_loop)
		idx = create_loop (size_int (startlimb + 1), &idx_next);
	      else
		idx = size_int (startlimb + 1);
	      tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
	      g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	      insert_before (g);
	      l = gimple_assign_lhs (g);
	      if (i == 0 || i == 2)
		l = arith_overflow_extract_bits (start, end, l,
						 tree_to_uhwi (idx),
						 check_zero);
	      if (i == 0 && !check_zero)
		{
		  cmp = l;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, l,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		}
	      else
		g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::likely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      if (i == 1 && use_loop)
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
					   size_one_node);
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next,
					 size_int (endlimb + (cnt == 2)),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge true_edge, false_edge;
		  extract_true_false_edges_from_block (gsi_bb (m_gsi),
						       &true_edge,
						       &false_edge);
		  m_gsi = gsi_after_labels (false_edge->dest);
		  m_bb = NULL;
		}
	    }

	  ovf = make_ssa_name (boolean_type_node);
	  basic_block bb = gimple_bb (final_stmt);
	  gphi *phi = create_phi_node (ovf, bb);
	  edge e1 = find_edge (gsi_bb (m_gsi), bb);
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      tree val = e == e1 ? boolean_false_node : boolean_true_node;
	      add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
	    }
	  m_gsi = gsi_for_stmt (final_stmt);
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
}

/* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
   .{ADD,SUB,MUL}_OVERFLOW call.  */
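
/* E.g. (illustrative) for GIMPLE like

     _1 = .ADD_OVERFLOW (a, b);
     res = REALPART_EXPR <_1>;
     ovf = IMAGPART_EXPR <_1>;

   this copies the requested half of the lowered _Complex result's
   limb array into OBJ.  */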

void
bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  rhs1 = TREE_OPERAND (rhs1, 0);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  if (TREE_CODE (rhs1) == SSA_NAME
      && (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
    {
      lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
      return;
    }
  int part = var_to_partition (m_map, rhs1);
  gcc_assert (m_vars[part] != NULL_TREE);
  tree var = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
    obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
  tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
			    gimple_assign_rhs_code (stmt) == REALPART_EXPR
			    ? 0 : nelts * m_limb_size);
  tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
  gimple *g = gimple_build_assign (obj, v2);
  insert_before (g);
}

/* Lower COMPLEX_EXPR stmt.  */
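
/* E.g. (illustrative) lhs = COMPLEX_EXPR <res, ovf> is lowered below
   into two limb array copies, one at offset 0 and one at
   TYPE_SIZE_UNIT (atype) within the partition variable of LHS.  */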

void
bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  int part = var_to_partition (m_map, lhs);
  gcc_assert (m_vars[part] != NULL_TREE);
  lhs = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
  tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
  tree v2;
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs1);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs1))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs1);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  gimple *g = gimple_build_assign (v1, v2);
  insert_before (g);

  tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
			   TYPE_SIZE_UNIT (atype));
  v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
  if (TREE_CODE (rhs2) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs2);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs2))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs2);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  g = gimple_build_assign (v1, v2);
  insert_before (g);
}

/* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge
   _BitInt argument.  */
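
/* E.g. (illustrative) the C23 type-generic built-ins

     unsigned _BitInt(300) x;
     int p = __builtin_popcountg (x);
     int z = __builtin_clzg (x, 300);

   are lowered here by iterating over the limbs and using the
   word-sized __builtin_{clz,ctz,clrsb,ffs,parity,popcount}{,l,ll}
   on individual limbs.  */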

void
bitint_large_huge::lower_bit_query (gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = (gimple_call_num_args (stmt) == 2
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  tree type = TREE_TYPE (arg0);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  enum internal_fn ifn = gimple_call_internal_fn (stmt);
  enum built_in_function fcode = END_BUILTINS;
  gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
  switch (ifn)
    {
    case IFN_CLZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZL;
      else
	fcode = BUILT_IN_CLZLL;
      break;
    case IFN_FFS:
      /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
	 we don't add the addend at the end.  */
      arg1 = integer_zero_node;
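      /* Illustrative identity: for x = 8, .CTZ (x) is 3 while
	 .FFS (x) is 4; seeding the addend accumulation with 1 instead
	 of 0 (see the prepare_data_in_out call below) supplies the
	 + 1 without a separate addition at the end.  */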
      /* FALLTHRU */
    case IFN_CTZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZL;
      else
	fcode = BUILT_IN_CTZLL;
      break;
    case IFN_CLRSB:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSB;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSBL;
      else
	fcode = BUILT_IN_CLRSBLL;
      break;
    case IFN_PARITY:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITY;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITYL;
      else
	fcode = BUILT_IN_PARITYLL;
      break;
    case IFN_POPCOUNT:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNT;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNTL;
      else
	fcode = BUILT_IN_POPCOUNTLL;
      break;
    default:
      gcc_unreachable ();
    }

  tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
  unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  struct bq_details { edge e; tree val, addend; } *bqp = NULL;
  basic_block edge_bb = NULL;
  if (ifn != IFN_CLZ && ifn != IFN_CLRSB)
    {
      tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = (prec % (2 * limb_prec));
	  end = (prec - rem) / limb_prec;
	  cnt = 2 + CEIL (rem, limb_prec);
	  idx = idx_first = create_loop (size_zero_node, &idx_next);
	}

      if (ifn == IFN_CTZ || ifn == IFN_FFS)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  if (kind == bitint_prec_large)
	    m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	m_after_stmt = stmt;
      if (kind != bitint_prec_large)
	m_upwards_2limb = end;

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (i);
	  else if (i >= 2)
	    idx = size_int (end + (i > 2));

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  tree in, out, tem;
	  if (ifn == IFN_PARITY)
	    in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	  else if (ifn == IFN_FFS)
	    in = prepare_data_in_out (integer_one_node, idx, &out);
	  else
	    in = prepare_data_in_out (integer_zero_node, idx, &out);

	  switch (ifn)
	    {
	    case IFN_CTZ:
	    case IFN_FFS:
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1, e2;
	      e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	      if (tree_fits_uhwi_p (idx))
		bqp[i].addend
		  = build_int_cst (integer_type_node,
				   tree_to_uhwi (idx) * limb_prec
				   + (ifn == IFN_FFS));
	      else
		{
		  bqp[i].addend = in;
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in,
					   build_int_cst (integer_type_node,
							  2 * limb_prec));
		  insert_before (g);
		  m_data[m_data_cnt] = res;
		}
	      break;
	    case IFN_PARITY:
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
		  insert_before (g);
		}
	      else
		res = rhs1;
	      m_data[m_data_cnt] = res;
	      break;
	    case IFN_POPCOUNT:
	      g = gimple_build_call (fndecl, 1, rhs1);
	      tem = make_ssa_name (integer_type_node);
	      gimple_call_set_lhs (g, tem);
	      insert_before (g);
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in, tem);
		  insert_before (g);
		}
	      else
		res = tem;
	      m_data[m_data_cnt] = res;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i <= 1)
	    {
	      if (i == 0)
		{
		  idx = make_ssa_name (sizetype);
		  g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
					   size_one_node);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
					   size_int (2));
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  if (ifn == IFN_CTZ || ifn == IFN_FFS)
		    m_gsi = gsi_after_labels (edge_bb);
		  else
		    m_gsi = gsi_for_stmt (stmt);
		  m_bb = NULL;
		}
	    }
	}
    }
  else
    {
      tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
      int sub_one = 0;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = prec % limb_prec;
	  if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
	    rem = limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = 1 + (rem != 0);
	}
      if (ifn == IFN_CLRSB)
	sub_one = 1;

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      m_gsi = gsi_end_bb (edge_bb);

      if (ifn == IFN_CLZ)
	bqp = XALLOCAVEC (struct bq_details, cnt);
      else
	{
	  gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
	}

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (cnt - i - 1);
	  else if (i == cnt - 1)
	    idx = create_loop (size_int (end - 1), &idx_next);
	  else
	    idx = size_int (end);

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  if (ifn == IFN_CLZ)
	    {
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	    }
	  else
	    {
	      if (i == 0)
		{
		  first = rhs1;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, rhs1,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   BIT_XOR_EXPR, rhs1, first);
		  insert_before (g);
		  tree stype = signed_type_for (m_limb_type);
		  g = gimple_build_cond (LT_EXPR,
					 add_cast (stype,
						   gimple_assign_lhs (g)),
					 build_zero_cst (stype),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest,
					 e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[2 * i].e = e2;
	      bqp[i].val = rhs1;
	      g = gimple_build_cond (NE_EXPR, rhs1, first,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[2 * i + 1].e = e2;
	    }

	  if (tree_fits_uhwi_p (idx))
	    bqp[i].addend
	      = build_int_cst (integer_type_node,
			       prec
			       - (((int) tree_to_uhwi (idx) + 1)
				  * limb_prec) - sub_one);
	  else
	    {
	      tree in, out;
	      in = build_int_cst (integer_type_node, rem - sub_one);
	      if (m_first)
		in = prepare_data_in_out (in, idx, &out);
	      out = m_data[m_data_cnt + 1];
	      bqp[i].addend = in;
	      g = gimple_build_assign (out, PLUS_EXPR, in,
				       build_int_cst (integer_type_node,
						      limb_prec));
	      insert_before (g);
	      m_data[m_data_cnt] = out;
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i == cnt - 1)
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_int (-1));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge true_edge, false_edge;
	      extract_true_false_edges_from_block (gsi_bb (m_gsi),
						   &true_edge, &false_edge);
	      m_gsi = gsi_after_labels (false_edge->dest);
	      m_bb = NULL;
	    }
	}
    }

  gphi *phi1, *phi2, *phi3;
  basic_block bb;
  if (ifn == IFN_CLZ || ifn == IFN_CTZ || ifn == IFN_FFS)
    {
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, gimple_bb (stmt)));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      gimple_bb (stmt));
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
	}
      if (arg1 == NULL_TREE)
	{
	  g = gimple_build_builtin_unreachable (m_loc);
	  insert_before (g);
	}
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      if (arg1 == NULL_TREE)
	g = gimple_build_assign (lhs, PLUS_EXPR,
				 gimple_phi_result (phi2),
				 gimple_call_lhs (g));
      else
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_phi_result (phi2),
				   gimple_call_lhs (g));
	  insert_before (g);
	  edge e1 = split_block (gimple_bb (stmt), g);
	  edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
	  e2->probability = profile_probability::always ();
	  set_immediate_dominator (CDI_DOMINATORS, e1->dest,
				   get_immediate_dominator (CDI_DOMINATORS,
							    e1->src));
	  phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
	  add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
	  add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
	  m_gsi = gsi_for_stmt (stmt);
	  g = gimple_build_assign (lhs, gimple_phi_result (phi3));
	}
      gsi_replace (&m_gsi, g, true);
    }
  else if (ifn == IFN_CLRSB)
    {
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, edge_bb));
      edge e;
      e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
      set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
			       get_immediate_dominator (CDI_DOMINATORS,
							edge_bb));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      edge_bb);
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      edge_bb);
      phi3 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
		       UNKNOWN_LOCATION);
	  tree a = bqp[i].addend;
	  if (i && kind == bitint_prec_large)
	    a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
	  add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
	}
      add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
		   UNKNOWN_LOCATION);
      m_gsi = gsi_after_labels (edge_bb);
      g = gimple_build_call (fndecl, 1,
			     add_cast (signed_type_for (m_limb_type),
				       gimple_phi_result (phi1)));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (integer_type_node),
			       PLUS_EXPR, gimple_call_lhs (g),
			       gimple_phi_result (phi2));
      insert_before (g);
      if (kind != bitint_prec_large)
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_assign_lhs (g),
				   integer_one_node);
	  insert_before (g);
	}
      add_phi_arg (phi3, gimple_assign_lhs (g),
		   find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_assign (lhs, gimple_phi_result (phi3));
      gsi_replace (&m_gsi, g, true);
    }
  else if (ifn == IFN_PARITY)
    {
      g = gimple_build_call (fndecl, 1, res);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&m_gsi, g, true);
    }
  else
    {
      g = gimple_build_assign (lhs, res);
      gsi_replace (&m_gsi, g, true);
    }
}

/* Lower a call statement with one or more large/huge _BitInt
   arguments or large/huge _BitInt return value.  */
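
/* E.g. (illustrative) for a call like

     void foo (_BitInt(512));
     ...
     foo (x);

   a large/huge _BitInt argument cannot remain a lowered SSA_NAME, so
   below it is reloaded from its limb array partition variable through
   a VIEW_CONVERT_EXPR right before the call.  */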

void
bitint_large_huge::lower_call (tree obj, gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  unsigned int nargs = gimple_call_num_args (stmt);
  if (gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_UBSAN_CHECK_ADD:
      case IFN_UBSAN_CHECK_SUB:
	lower_addsub_overflow (obj, stmt);
	return;
      case IFN_MUL_OVERFLOW:
      case IFN_UBSAN_CHECK_MUL:
	lower_mul_overflow (obj, stmt);
	return;
      case IFN_CLZ:
      case IFN_CTZ:
      case IFN_CLRSB:
      case IFN_FFS:
      case IFN_PARITY:
      case IFN_POPCOUNT:
	lower_bit_query (stmt);
	return;
      default:
	break;
      }
  bool returns_twice = (gimple_call_flags (stmt) & ECF_RETURNS_TWICE) != 0;
  for (unsigned int i = 0; i < nargs; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (TREE_CODE (arg) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
	  || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
	continue;
      if (SSA_NAME_IS_DEFAULT_DEF (arg)
	  && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
	{
	  tree var = create_tmp_reg (TREE_TYPE (arg));
	  arg = get_or_create_ssa_default_def (cfun, var);
	}
      else
	{
	  int p = var_to_partition (m_map, arg);
	  tree v = m_vars[p];
	  gcc_assert (v != NULL_TREE);
	  if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
	    v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
	  arg = make_ssa_name (TREE_TYPE (arg));
	  gimple *g = gimple_build_assign (arg, v);
	  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	  if (returns_twice && bb_has_abnormal_pred (gimple_bb (stmt)))
	    {
	      m_returns_twice_calls.safe_push (stmt);
	      returns_twice = false;
	    }
	}
      gimple_call_set_arg (stmt, i, arg);
      if (m_preserved == NULL)
	m_preserved = BITMAP_ALLOC (NULL);
      bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
    }
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      tree v = m_vars[p];
      gcc_assert (v != NULL_TREE);
      if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
	v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
      gimple_call_set_lhs (stmt, v);
      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
    }
  update_stmt (stmt);
}

/* Lower __asm STMT which involves large/huge _BitInt values.  */

void
bitint_large_huge::lower_asm (gimple *stmt)
{
  gasm *g = as_a <gasm *> (stmt);
  unsigned noutputs = gimple_asm_noutputs (g);
  unsigned ninputs = gimple_asm_ninputs (g);

  for (unsigned i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  int part = var_to_partition (m_map, s);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  TREE_VALUE (t) = m_vars[part];
	}
    }
  for (unsigned i = 0; i < ninputs; ++i)
    {
      tree t = gimple_asm_input_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (s)
	      && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
	    {
	      TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
	      mark_addressable (TREE_VALUE (t));
	    }
	  else
	    {
	      int part = var_to_partition (m_map, s);
	      gcc_assert (m_vars[part] != NULL_TREE);
	      TREE_VALUE (t) = m_vars[part];
	    }
	}
    }
  update_stmt (stmt);
}

/* Lower statement STMT which involves large/huge _BitInt values
   into code accessing individual limbs.  */

void
bitint_large_huge::lower_stmt (gimple *stmt)
{
  m_first = true;
  m_data.truncate (0);
  m_data_cnt = 0;
  m_gsi = gsi_for_stmt (stmt);
  m_after_stmt = NULL;
  m_bb = NULL;
  m_init_gsi = m_gsi;
  gsi_prev (&m_init_gsi);
  m_preheader_bb = NULL;
  m_upwards_2limb = 0;
  m_upwards = false;
  m_var_msb = false;
  m_cast_conditional = false;
  m_bitfld_load = 0;
  m_loc = gimple_location (stmt);
  if (is_gimple_call (stmt))
    {
      lower_call (NULL_TREE, stmt);
      return;
    }
  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      lower_asm (stmt);
      return;
    }
  tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
  tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
  bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
  bool mergeable_cast_p = false;
  bool final_cast_p = false;
  if (gimple_assign_cast_p (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
	rhs1 = TREE_OPERAND (rhs1, 0);
      if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	mergeable_cast_p = true;
      else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	       && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   || POINTER_TYPE_P (TREE_TYPE (lhs))
		   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR))
	{
	  final_cast_p = true;
	  if (((TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
		&& TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
	       || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   && !POINTER_TYPE_P (TREE_TYPE (lhs))))
	      && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
	    {
	      /* Handle VIEW_CONVERT_EXPRs to not generally supported
		 huge INTEGER_TYPEs like uint256_t or uint512_t.  These
		 are usually emitted from memcpy folding and backends
		 support moves with them but that is usually it.
		 Similarly handle VCEs to vector/complex types etc.  */
	      gcc_assert (TREE_CODE (rhs1) == SSA_NAME);
	      if (SSA_NAME_IS_DEFAULT_DEF (rhs1)
		  && (!SSA_NAME_VAR (rhs1) || VAR_P (SSA_NAME_VAR (rhs1))))
		{
		  tree var = create_tmp_reg (TREE_TYPE (lhs));
		  rhs1 = get_or_create_ssa_default_def (cfun, var);
		  gimple_assign_set_rhs1 (stmt, rhs1);
		  gimple_assign_set_rhs_code (stmt, SSA_NAME);
		}
	      else if (m_names == NULL
		       || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
		{
		  gimple *g = SSA_NAME_DEF_STMT (rhs1);
		  gcc_assert (gimple_assign_load_p (g));
		  tree mem = gimple_assign_rhs1 (g);
		  tree ltype = TREE_TYPE (lhs);
		  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (mem));
		  if (as != TYPE_ADDR_SPACE (ltype))
		    ltype
		      = build_qualified_type (ltype,
					      TYPE_QUALS (ltype)
					      | ENCODE_QUAL_ADDR_SPACE (as));
		  rhs1 = build1 (VIEW_CONVERT_EXPR, ltype, unshare_expr (mem));
		  gimple_assign_set_rhs1 (stmt, rhs1);
		}
	      else
		{
		  int part = var_to_partition (m_map, rhs1);
		  gcc_assert (m_vars[part] != NULL_TREE);
		  rhs1 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				 m_vars[part]);
		  gimple_assign_set_rhs1 (stmt, rhs1);
		}
	      update_stmt (stmt);
	      return;
	    }
	  if (TREE_CODE (rhs1) == SSA_NAME
	      && (m_names == NULL
		  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (rhs1);
	      if (is_gimple_assign (g)
		  && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
		{
		  tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
		  if (TREE_CODE (rhs2) == SSA_NAME
		      && (m_names == NULL
			  || !bitmap_bit_p (m_names,
					    SSA_NAME_VERSION (rhs2))))
		    {
		      g = SSA_NAME_DEF_STMT (rhs2);
		      int ovf = optimizable_arith_overflow (g);
		      if (ovf == 2)
			/* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
			   and IMAGPART_EXPR uses, where the latter is cast to
			   non-_BitInt, it will be optimized when handling
			   the REALPART_EXPR.  */
			return;
		      if (ovf == 1)
			{
			  lower_call (NULL_TREE, g);
			  return;
			}
		    }
		}
	    }
	}
      else if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	       && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	       && !POINTER_TYPE_P (TREE_TYPE (rhs1))
	       && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
	{
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs1), m_vars[part]);
	  insert_before (gimple_build_assign (lhs, rhs1));
	  gsi_remove (&m_gsi, true);
	  return;
	}
    }
  if (gimple_store_p (stmt))
    {
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	{
	  gimple *g = SSA_NAME_DEF_STMT (rhs1);
	  m_loc = gimple_location (g);
	  lhs = gimple_assign_lhs (stmt);
	  if (is_gimple_assign (g) && !mergeable_op (g))
	    switch (gimple_assign_rhs_code (g))
	      {
	      case LSHIFT_EXPR:
	      case RSHIFT_EXPR:
		lower_shift_stmt (lhs, g);
	      handled:
		m_gsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		release_ssa_name (gimple_vdef (stmt));
		gsi_remove (&m_gsi, true);
		return;
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case EXACT_DIV_EXPR:
	      case TRUNC_MOD_EXPR:
		lower_muldiv_stmt (lhs, g);
		goto handled;
	      case FIX_TRUNC_EXPR:
		lower_float_conv_stmt (lhs, g);
		goto handled;
	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		lower_cplxpart_stmt (lhs, g);
		goto handled;
	      case VIEW_CONVERT_EXPR:
		{
		  tree rhs1 = gimple_assign_rhs1 (g);
		  rhs1 = TREE_OPERAND (rhs1, 0);
		  if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		      && !POINTER_TYPE_P (TREE_TYPE (rhs1)))
		    {
		      tree ltype = TREE_TYPE (rhs1);
		      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (lhs));
		      ltype
			= build_qualified_type (ltype,
						TYPE_QUALS (TREE_TYPE (lhs))
						| ENCODE_QUAL_ADDR_SPACE (as));
		      lhs = build1 (VIEW_CONVERT_EXPR, ltype, lhs);
		      gimple_assign_set_lhs (stmt, lhs);
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      gimple_assign_set_rhs_code (stmt, TREE_CODE (rhs1));
		      update_stmt (stmt);
		      return;
		    }
		}
		break;
	      default:
		break;
	      }
	  else if (optimizable_arith_overflow (g) == 3)
	    {
	      lower_call (lhs, g);
	      goto handled;
	    }
	  m_loc = gimple_location (stmt);
	}
    }
  if (mergeable_op (stmt)
      || gimple_store_p (stmt)
      || gimple_assign_load_p (stmt)
      || eq_p
      || mergeable_cast_p
      || (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == PAREN_EXPR))
    {
      lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
      if (!eq_p)
	return;
    }
  else if (cmp_code != ERROR_MARK)
    lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
  if (cmp_code != ERROR_MARK)
    {
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cstmt = as_a <gcond *> (stmt);
	  gimple_cond_set_lhs (cstmt, lhs);
	  gimple_cond_set_rhs (cstmt, boolean_false_node);
	  gimple_cond_set_code (cstmt, cmp_code);
	  update_stmt (stmt);
	  return;
	}
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree cond = build2 (cmp_code, boolean_type_node, lhs,
			      boolean_false_node);
	  gimple_assign_set_rhs1 (stmt, cond);
	  lhs = gimple_assign_lhs (stmt);
	  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
		      || (bitint_precision_kind (TREE_TYPE (lhs))
			  <= bitint_prec_middle));
	}
      else
	{
	  gimple_assign_set_rhs1 (stmt, lhs);
	  gimple_assign_set_rhs2 (stmt, boolean_false_node);
	  gimple_assign_set_rhs_code (stmt, cmp_code);
	}
      update_stmt (stmt);
      return;
    }
  if (final_cast_p)
    {
      tree lhs_type = TREE_TYPE (lhs);
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
      gimple *g;
      if ((TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) == bitint_prec_middle)
	  || POINTER_TYPE_P (lhs_type))
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
						   TYPE_UNSIGNED (lhs_type));
      m_data_cnt = 0;
      tree rhs1 = gimple_assign_rhs1 (stmt);
      tree r1 = handle_operand (rhs1, size_int (0));
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  m_data_cnt = 0;
	  m_first = false;
	  tree r2 = handle_operand (rhs1, size_int (1));
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != TREE_TYPE (lhs))
	g = gimple_build_assign (lhs, NOP_EXPR, r1);
      else
	g = gimple_build_assign (lhs, r1);
      gsi_replace (&m_gsi, g, true);
      return;
    }
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case LSHIFT_EXPR:
      case RSHIFT_EXPR:
	lower_shift_stmt (NULL_TREE, stmt);
	return;
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case EXACT_DIV_EXPR:
      case TRUNC_MOD_EXPR:
	lower_muldiv_stmt (NULL_TREE, stmt);
	return;
      case FIX_TRUNC_EXPR:
      case FLOAT_EXPR:
	lower_float_conv_stmt (NULL_TREE, stmt);
	return;
      case REALPART_EXPR:
      case IMAGPART_EXPR:
	lower_cplxpart_stmt (NULL_TREE, stmt);
	return;
      case COMPLEX_EXPR:
	lower_complexexpr_stmt (stmt);
	return;
      default:
	break;
      }
  gcc_unreachable ();
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}

/* Return true if STMT uses a library function and needs to take
   address of its inputs.  We need to avoid bit-fields in those
   cases.  Similarly, we need to avoid overlap between destination
   and source limb arrays.  */
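
/* E.g. (illustrative) huge _BitInt division and modulo are lowered to
   calls to the libgcc __divmodbitint4 helper, which takes addresses
   of the quotient, remainder and both operands, hence these
   restrictions.  */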

static bool
stmt_needs_operand_addr (gimple *stmt)
{
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case EXACT_DIV_EXPR:
      case TRUNC_MOD_EXPR:
      case FLOAT_EXPR:
	return true;
      default:
	break;
      }
  else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
	   || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
    return true;
  return false;
}

/* Dominator walker used to discover which large/huge _BitInt
   loads could be sunk into all their uses.  */

class bitint_dom_walker : public dom_walker
{
public:
  bitint_dom_walker (bitmap names, bitmap loads)
    : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}

  edge before_dom_children (basic_block) final override;

private:
  bitmap m_names, m_loads;
};

edge
bitint_dom_walker::before_dom_children (basic_block bb)
{
  gphi *phi = get_virtual_phi (bb);
  tree vop;
  if (phi)
    vop = gimple_phi_result (phi);
  else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    vop = NULL_TREE;
  else
    vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;

  auto_vec<tree, 16> worklist;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;

      if (!vop && gimple_vuse (stmt))
	vop = gimple_vuse (stmt);

      tree cvop = vop;
      if (gimple_vdef (stmt))
	vop = gimple_vdef (stmt);

      tree lhs = gimple_get_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
	/* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
	   it means it will be handled in a loop or straight line code
	   at the location of its (ultimate) immediate use, so for
	   vop checking purposes check these only at the ultimate
	   immediate use.  */
	continue;

      ssa_op_iter oi;
      use_operand_p use_p;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
	{
	  tree s = USE_FROM_PTR (use_p);
	  if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	      && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	    worklist.safe_push (s);
	}

      bool needs_operand_addr = stmt_needs_operand_addr (stmt);
      while (worklist.length () > 0)
	{
	  tree s = worklist.pop ();

	  if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      needs_operand_addr |= stmt_needs_operand_addr (g);
	      FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
		      && (bitint_precision_kind (TREE_TYPE (s2))
			  >= bitint_prec_large))
		    worklist.safe_push (s2);
		}
	      continue;
	    }
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
	    {
	      tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
		s = rhs;
	      else
		continue;
	    }
	  else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
	    continue;

	  gimple *g = SSA_NAME_DEF_STMT (s);
	  tree rhs1 = gimple_assign_rhs1 (g);
	  if (needs_operand_addr
	      && TREE_CODE (rhs1) == COMPONENT_REF
	      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
	    {
	      tree fld = TREE_OPERAND (rhs1, 1);
	      /* For little-endian, we can allow as inputs bit-fields
		 which start at a limb boundary.  */
	      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
		  && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
		  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		      % limb_prec) == 0)
		;
	      else
		{
		  bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
		  continue;
		}
	    }

	  ao_ref ref;
	  ao_ref_init (&ref, rhs1);
	  tree lvop = gimple_vuse (g);
	  unsigned limit = 64;
	  tree vuse = cvop;
	  if (vop != cvop
	      && is_gimple_assign (stmt)
	      && gimple_store_p (stmt)
	      && (needs_operand_addr
		  || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
	    vuse = vop;
	  if (vuse != lvop
	      && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
					 NULL, NULL, limit, lvop) == NULL)
	    bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
	}
    }

  bb->aux = (void *) vop;
  return NULL;
}

/* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
   build_ssa_conflict_graph.
   The differences are:
   1) don't process assignments with large/huge _BitInt lhs not in NAMES
   2) for large/huge _BitInt multiplication/division/modulo process def
      only after processing uses rather than before to make uses conflict
      with the definition
   3) for large/huge _BitInt uses not in NAMES mark the uses of their
      SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
      the final statement.  */

void
build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
				 ssa_conflicts *graph, bitmap names,
				 void (*def) (live_track *, tree,
					      ssa_conflicts *),
				 void (*use) (live_track *, tree))
{
  bool muldiv_p = false;
  tree lhs = NULL_TREE;
  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  tree type = TREE_TYPE (lhs);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
		return;
	      switch (gimple_assign_rhs_code (stmt))
		{
		case MULT_EXPR:
		case TRUNC_DIV_EXPR:
		case EXACT_DIV_EXPR:
		case TRUNC_MOD_EXPR:
		  muldiv_p = true;
		default:
		  break;
		}
	    }
	}
    }

  ssa_op_iter iter;
  tree var;
  if (!muldiv_p)
    {
      /* For stmts with more than one SSA_NAME definition pretend all the
	 SSA_NAME outputs but the first one are live at this point, so
	 that conflicts are added in between all those even when they are
	 actually not really live after the asm, because expansion might
	 copy those into pseudos after the asm and if multiple outputs
	 share the same partition, it might overwrite those that should
	 be live.  E.g.
	 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
	 return a;  */
      bool first = true;
      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	if (first)
	  first = false;
	else
	  use (live, var);

      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	def (live, var, graph);
    }

  auto_vec<tree, 16> worklist;
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    {
      tree type = TREE_TYPE (var);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	type = TREE_TYPE (type);
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) >= bitint_prec_large)
	{
	  if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
	    use (live, var);
	  else
	    worklist.safe_push (var);
	}
      else
	use (live, var);
    }

  while (worklist.length () > 0)
    {
      tree s = worklist.pop ();
      FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
	{
	  tree type = TREE_TYPE (var);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
		use (live, var);
	      else
		worklist.safe_push (var);
	    }
	  else
	    use (live, var);
	}
    }

  if (muldiv_p)
    def (live, lhs, graph);
}

/* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
   return the largest bitint_prec_kind of them, otherwise return
   bitint_prec_small.  */
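
/* E.g. (illustrative): a .ADD_OVERFLOW call can have a small result
   type and a small second argument, yet its first argument may be an
   INTEGER_CST with a large/huge _BitInt type (say a 300-bit
   constant); such statements still need lowering even though no
   large/huge SSA_NAME appears in the IL.  */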

static bitint_prec_kind
arith_overflow_arg_kind (gimple *stmt)
{
  bitint_prec_kind ret = bitint_prec_small;
  if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_MUL_OVERFLOW:
	for (int i = 0; i < 2; ++i)
	  {
	    tree a = gimple_call_arg (stmt, i);
	    if (TREE_CODE (a) == INTEGER_CST
		&& TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
	      {
		bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
		ret = MAX (ret, kind);
	      }
	  }
	break;
      default:
	break;
      }
  return ret;
}

/* Entry point for _BitInt(N) operation lowering during optimization.  */
6048 gimple_lower_bitint (void)
6050 small_max_prec
= mid_min_prec
= large_min_prec
= huge_min_prec
= 0;
6054 for (i
= 0; i
< num_ssa_names
; ++i
)
6056 tree s
= ssa_name (i
);
6059 tree type
= TREE_TYPE (s
);
6060 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6062 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
6063 != bitint_prec_small
)
6065 type
= TREE_TYPE (type
);
6067 if (TREE_CODE (type
) == BITINT_TYPE
6068 && bitint_precision_kind (type
) != bitint_prec_small
)
6070 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6071 into memory. Such functions could have no large/huge SSA_NAMEs. */
6072 if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6074 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6075 if (is_gimple_assign (g
) && gimple_store_p (g
))
6077 tree t
= gimple_assign_rhs1 (g
);
6078 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6079 && (bitint_precision_kind (TREE_TYPE (t
))
6080 >= bitint_prec_large
))
6084 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6085 to floating point types need to be rewritten. */
6086 else if (SCALAR_FLOAT_TYPE_P (type
))
6088 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6089 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
6091 tree t
= gimple_assign_rhs1 (g
);
6092 if (TREE_CODE (t
) == INTEGER_CST
6093 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6094 && (bitint_precision_kind (TREE_TYPE (t
))
6095 != bitint_prec_small
))
6100 if (i
== num_ssa_names
)
6104 auto_vec
<gimple
*, 4> switch_statements
;
6105 FOR_EACH_BB_FN (bb
, cfun
)
6107 if (gswitch
*swtch
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
6109 tree idx
= gimple_switch_index (swtch
);
6110 if (TREE_CODE (TREE_TYPE (idx
)) != BITINT_TYPE
6111 || bitint_precision_kind (TREE_TYPE (idx
)) < bitint_prec_large
)
6115 group_case_labels_stmt (swtch
);
6116 if (gimple_switch_num_labels (swtch
) == 1)
6118 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
6119 gimple_stmt_iterator gsi
= gsi_for_stmt (swtch
);
6120 gsi_remove (&gsi
, true);
6123 switch_statements
.safe_push (swtch
);
6127 if (!switch_statements
.is_empty ())
6129 bool expanded
= false;
6133 FOR_EACH_VEC_ELT (switch_statements
, j
, stmt
)
6135 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
6136 tree_switch_conversion::switch_decision_tree
dt (swtch
);
6137 expanded
|= dt
.analyze_switch_statement ();
6142 free_dominance_info (CDI_DOMINATORS
);
6143 free_dominance_info (CDI_POST_DOMINATORS
);
6144 mark_virtual_operands_for_renaming (cfun
);
6145 cleanup_tree_cfg (TODO_update_ssa
);
  struct bitint_large_huge large_huge;
  bool has_large_huge_parm_result = false;
  bool has_large_huge = false;
  unsigned int ret = 0, first_large_huge = ~0U;
  bool edge_insertions = false;
  for (; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
	continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
	      >= bitint_prec_large)
	    has_large_huge = true;
	  type = TREE_TYPE (type);
	}
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) >= bitint_prec_large)
	{
	  if (first_large_huge == ~0U)
	    first_large_huge = i;
	  gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
	  gimple_stmt_iterator gsi;
	  tree_code rhs_code;
	  /* Unoptimize certain constructs to simpler alternatives to
	     avoid having to lower all of them.  */
	  if (is_gimple_assign (stmt) && gimple_bb (stmt))
	    switch (rhs_code = gimple_assign_rhs_code (stmt))
	      {
	      default:
		break;
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case EXACT_DIV_EXPR:
	      case TRUNC_MOD_EXPR:
		if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s))
		  {
		    location_t loc = gimple_location (stmt);
		    gsi = gsi_for_stmt (stmt);
		    tree rhs1 = gimple_assign_rhs1 (stmt);
		    tree rhs2 = gimple_assign_rhs2 (stmt);
		    /* For multiplication and division with (ab)
		       lhs and one or both operands force the operands
		       into new SSA_NAMEs to avoid coalescing failures.  */
		    if (TREE_CODE (rhs1) == SSA_NAME
			&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
		      {
			first_large_huge = 0;
			tree t = make_ssa_name (TREE_TYPE (rhs1));
			g = gimple_build_assign (t, SSA_NAME, rhs1);
			gsi_insert_before (&gsi, g, GSI_SAME_STMT);
			gimple_set_location (g, loc);
			gimple_assign_set_rhs1 (stmt, t);
			if (rhs1 == rhs2)
			  {
			    gimple_assign_set_rhs2 (stmt, t);
			    rhs2 = t;
			  }
			update_stmt (stmt);
		      }
		    if (TREE_CODE (rhs2) == SSA_NAME
			&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2))
		      {
			first_large_huge = 0;
			tree t = make_ssa_name (TREE_TYPE (rhs2));
			g = gimple_build_assign (t, SSA_NAME, rhs2);
			gsi_insert_before (&gsi, g, GSI_SAME_STMT);
			gimple_set_location (g, loc);
			gimple_assign_set_rhs2 (stmt, t);
			update_stmt (stmt);
		      }
		  }
		break;
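	      /* Sketch of the rewrite done below (illustration only,
		 assuming _BitInt(256) x and a variable rotate count n):
		   y = x r<< n;
		 becomes
		   m = (256 - n) % 256;
		   ux = (unsigned _BitInt(256)) x;   // only if x is signed
		   y = (typeof (x)) ((ux << n) | (ux >> m));
		 so that only shifts and bitwise ior are left to lower.  */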
	      case LROTATE_EXPR:
	      case RROTATE_EXPR:
		{
		  first_large_huge = 0;
		  location_t loc = gimple_location (stmt);
		  gsi = gsi_for_stmt (stmt);
		  tree rhs1 = gimple_assign_rhs1 (stmt);
		  tree type = TREE_TYPE (rhs1);
		  tree n = gimple_assign_rhs2 (stmt), m;
		  tree p = build_int_cst (TREE_TYPE (n),
					  TYPE_PRECISION (type));
		  if (TREE_CODE (n) == INTEGER_CST)
		    {
		      if (integer_zerop (n))
			m = n;
		      else
			m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
		    }
		  else
		    {
		      tree tem = make_ssa_name (TREE_TYPE (n));
		      g = gimple_build_assign (tem, MINUS_EXPR, p, n);
		      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		      gimple_set_location (g, loc);
		      m = make_ssa_name (TREE_TYPE (n));
		      g = gimple_build_assign (m, TRUNC_MOD_EXPR, tem, p);
		      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		      gimple_set_location (g, loc);
		    }
		  if (!TYPE_UNSIGNED (type))
		    {
		      tree utype = build_bitint_type (TYPE_PRECISION (type),
						      1);
		      if (TREE_CODE (rhs1) == INTEGER_CST)
			rhs1 = fold_convert (utype, rhs1);
		      else
			{
			  tree t = make_ssa_name (utype);
			  g = gimple_build_assign (t, NOP_EXPR, rhs1);
			  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
			  gimple_set_location (g, loc);
			  rhs1 = t;
			}
		    }
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   rhs_code == LROTATE_EXPR
					   ? LSHIFT_EXPR : RSHIFT_EXPR,
					   rhs1, n);
		  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		  gimple_set_location (g, loc);
		  tree op1 = gimple_assign_lhs (g);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   rhs_code == LROTATE_EXPR
					   ? RSHIFT_EXPR : LSHIFT_EXPR,
					   rhs1, m);
		  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		  gimple_set_location (g, loc);
		  tree op2 = gimple_assign_lhs (g);
		  tree lhs = gimple_assign_lhs (stmt);
		  if (!TYPE_UNSIGNED (type))
		    {
		      g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
					       BIT_IOR_EXPR, op1, op2);
		      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		      gimple_set_location (g, loc);
		      g = gimple_build_assign (lhs, NOP_EXPR,
					       gimple_assign_lhs (g));
		    }
		  else
		    g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
		  gsi_replace (&gsi, g, true);
		  gimple_set_location (g, loc);
		}
		break;
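	      /* Sketch (illustration only): the codes below are rewritten
		 into a conditional branch plus a PHI, e.g.
		   y = ABS_EXPR <x>;
		 becomes
		   if (x < 0) goto bb_neg; else goto bb_join;
		   bb_neg: x2 = -x;
		   bb_join: y = PHI <x2 (bb_neg), x (fallthru)>;
		 leaving only a comparison, a negation and a PHI to be
		 lowered later.  */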
	      case ABS_EXPR:
	      case ABSU_EXPR:
	      case MIN_EXPR:
	      case MAX_EXPR:
	      case COND_EXPR:
		first_large_huge = 0;
		gsi = gsi_for_stmt (stmt);
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
		location_t loc = gimple_location (stmt);
		if (rhs_code == ABS_EXPR)
		  g = gimple_build_cond (LT_EXPR, rhs1,
					 build_zero_cst (TREE_TYPE (rhs1)),
					 NULL_TREE, NULL_TREE);
		else if (rhs_code == ABSU_EXPR)
		  {
		    rhs2 = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
		    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		    gimple_set_location (g, loc);
		    g = gimple_build_cond (LT_EXPR, rhs1,
					   build_zero_cst (TREE_TYPE (rhs1)),
					   NULL_TREE, NULL_TREE);
		    rhs1 = rhs2;
		  }
		else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
		  {
		    rhs2 = gimple_assign_rhs2 (stmt);
		    if (TREE_CODE (rhs1) == INTEGER_CST)
		      std::swap (rhs1, rhs2);
		    g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
					   NULL_TREE, NULL_TREE);
		    if (rhs_code == MAX_EXPR)
		      std::swap (rhs1, rhs2);
		  }
		else
		  {
		    g = gimple_build_cond (NE_EXPR, rhs1,
					   build_zero_cst (TREE_TYPE (rhs1)),
					   NULL_TREE, NULL_TREE);
		    rhs1 = gimple_assign_rhs2 (stmt);
		    rhs2 = gimple_assign_rhs3 (stmt);
		  }
		gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		gimple_set_location (g, loc);
		edge e1 = split_block (gsi_bb (gsi), g);
		edge e2 = split_block (e1->dest, (gimple *) NULL);
		edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
		e3->probability = profile_probability::even ();
		e1->flags = EDGE_TRUE_VALUE;
		e1->probability = e3->probability.invert ();
		if (dom_info_available_p (CDI_DOMINATORS))
		  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
		if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
		  {
		    gsi = gsi_after_labels (e1->dest);
		    g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					     NEGATE_EXPR, rhs1);
		    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		    gimple_set_location (g, loc);
		    rhs2 = gimple_assign_lhs (g);
		    std::swap (rhs1, rhs2);
		  }
		gsi = gsi_for_stmt (stmt);
		gsi_remove (&gsi, true);
		gphi *phi = create_phi_node (lhs, e2->dest);
		add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
		add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
		break;
	      }
	}
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
	 into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_store_p (g))
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      >= bitint_prec_large))
		has_large_huge = true;
	    }
	}
      /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
	 to floating point types need to be rewritten.  */
      else if (SCALAR_FLOAT_TYPE_P (type))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (t) == INTEGER_CST
		  && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      >= bitint_prec_large))
		has_large_huge = true;
	    }
	}
    }
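  /* Illustrative note for the scan below (not from the original sources):
     an SSA_NAME with a single use in the same basic block, e.g. _1 in
       _1 = a_2(D) + b_3(D);
       _4 = _1 ^ c_5(D);
     is not recorded in m_names, because both mergeable statements can be
     lowered together in one loop over the limbs; only names which need
     their own backing storage are recorded.  */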
  for (i = first_large_huge; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
	continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	type = TREE_TYPE (type);
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) >= bitint_prec_large)
	{
	  use_operand_p use_p;
	  gimple *use_stmt;
	  has_large_huge = true;
	  if (optimize
	      && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
	    continue;
	  /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
	     the same bb and could be handled in the same loop with the
	     use.  */
	  if (optimize
	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && single_imm_use (s, &use_p, &use_stmt)
	      && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
	    {
	      if (mergeable_op (SSA_NAME_DEF_STMT (s)))
		{
		  if (mergeable_op (use_stmt))
		    continue;
		  tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
		  if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
		    continue;
		  if (gimple_assign_cast_p (use_stmt))
		    {
		      tree lhs = gimple_assign_lhs (use_stmt);
		      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
			  /* Don't merge with VIEW_CONVERT_EXPRs to
			     huge INTEGER_TYPEs used sometimes in memcpy
			     expansion.  */
			  && (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
			      || (TYPE_PRECISION (TREE_TYPE (lhs))
				  <= MAX_FIXED_MODE_SIZE)))
			continue;
		    }
		  else if (gimple_store_p (use_stmt)
			   && is_gimple_assign (use_stmt)
			   && !gimple_has_volatile_ops (use_stmt)
			   && !stmt_ends_bb_p (use_stmt))
		    continue;
		}
	      if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
		{
		  tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
		  if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
		    {
		      rhs1 = TREE_OPERAND (rhs1, 0);
		      if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
			  && !POINTER_TYPE_P (TREE_TYPE (rhs1))
			  && gimple_store_p (use_stmt))
			continue;
		    }
		  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		      && ((is_gimple_assign (use_stmt)
			   && (gimple_assign_rhs_code (use_stmt)
			       != COMPLEX_EXPR))
			  || gimple_code (use_stmt) == GIMPLE_COND)
		      && (!gimple_store_p (use_stmt)
			  || (is_gimple_assign (use_stmt)
			      && !gimple_has_volatile_ops (use_stmt)
			      && !stmt_ends_bb_p (use_stmt)))
		      && (TREE_CODE (rhs1) != SSA_NAME
			  || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
		    {
		      if (is_gimple_assign (use_stmt))
			switch (gimple_assign_rhs_code (use_stmt))
			  {
			  case TRUNC_DIV_EXPR:
			  case EXACT_DIV_EXPR:
			  case TRUNC_MOD_EXPR:
			  case FLOAT_EXPR:
			    /* For division, modulo and casts to floating
			       point, avoid representing unsigned operands
			       using negative prec if they were sign-extended
			       from narrower precision.  */
			    if (TYPE_UNSIGNED (TREE_TYPE (s))
				&& !TYPE_UNSIGNED (TREE_TYPE (rhs1))
				&& (TYPE_PRECISION (TREE_TYPE (s))
				    > TYPE_PRECISION (TREE_TYPE (rhs1))))
			      break;
			    if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
				|| (bitint_precision_kind (TREE_TYPE (rhs1))
				    < bitint_prec_large))
			      continue;
			    /* Uses which use handle_operand_addr can't
			       deal with nested casts.  */
			    if (TREE_CODE (rhs1) == SSA_NAME
				&& gimple_assign_cast_p
				     (SSA_NAME_DEF_STMT (rhs1))
				&& has_single_use (rhs1)
				&& (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
				    == gimple_bb (SSA_NAME_DEF_STMT (s))))
			      break;
			    continue;
			  case VIEW_CONVERT_EXPR:
			    {
			      tree lhs = gimple_assign_lhs (use_stmt);
			      /* Don't merge with VIEW_CONVERT_EXPRs to
				 non-integral types.  */
			      if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
				break;
			      /* Don't merge with VIEW_CONVERT_EXPRs to
				 huge INTEGER_TYPEs used sometimes in memcpy
				 expansion.  */
			      if (TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
				  && (TYPE_PRECISION (TREE_TYPE (lhs))
				      > MAX_FIXED_MODE_SIZE))
				break;
			      continue;
			    }
			  default:
			    break;
			  }
		    }
		  if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
		      || (bitint_precision_kind (TREE_TYPE (rhs1))
			  < bitint_prec_large))
		    continue;
		  if ((TYPE_PRECISION (TREE_TYPE (rhs1))
		       >= TYPE_PRECISION (TREE_TYPE (s)))
		      && mergeable_op (use_stmt))
		    continue;
		  /* Prevent merging a widening non-mergeable cast
		     on result of some narrower mergeable op
		     together with later mergeable operations.  E.g.
		     result of _BitInt(223) addition shouldn't be
		     sign-extended to _BitInt(513) and have another
		     _BitInt(513) added to it, as handle_plus_minus
		     with its PHI node handling inside of handle_cast
		     will not work correctly.  An exception is if
		     use_stmt is a store, this is handled directly
		     in lower_mergeable_stmt.  */
		  if (TREE_CODE (rhs1) != SSA_NAME
		      || !has_single_use (rhs1)
		      || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
			  != gimple_bb (SSA_NAME_DEF_STMT (s)))
		      || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
		      || gimple_store_p (use_stmt))
		    continue;
		  if ((TYPE_PRECISION (TREE_TYPE (rhs1))
		       < TYPE_PRECISION (TREE_TYPE (s)))
		      && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
		    {
		      /* Another exception is if the widening cast is
			 from mergeable same precision cast from something
			 not mergeable.  */
		      tree rhs2
			= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
		      if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
			  && (TYPE_PRECISION (TREE_TYPE (rhs1))
			      == TYPE_PRECISION (TREE_TYPE (rhs2))))
			{
			  if (TREE_CODE (rhs2) != SSA_NAME
			      || !has_single_use (rhs2)
			      || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
				  != gimple_bb (SSA_NAME_DEF_STMT (s)))
			      || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
			    continue;
			}
		    }
		}
	      if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
		switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
		  {
		  case IMAGPART_EXPR:
		    {
		      tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
		      rhs1 = TREE_OPERAND (rhs1, 0);
		      if (TREE_CODE (rhs1) == SSA_NAME)
			{
			  gimple *g = SSA_NAME_DEF_STMT (rhs1);
			  if (optimizable_arith_overflow (g))
			    continue;
			}
		    }
		    /* FALLTHRU */
		  case LSHIFT_EXPR:
		  case RSHIFT_EXPR:
		  case MULT_EXPR:
		  case TRUNC_DIV_EXPR:
		  case EXACT_DIV_EXPR:
		  case TRUNC_MOD_EXPR:
		  case FIX_TRUNC_EXPR:
		  case REALPART_EXPR:
		    if (gimple_store_p (use_stmt)
			&& is_gimple_assign (use_stmt)
			&& !gimple_has_volatile_ops (use_stmt)
			&& !stmt_ends_bb_p (use_stmt))
		      {
			tree lhs = gimple_assign_lhs (use_stmt);
			/* As multiply/division passes address of the lhs
			   to library function and that assumes it can extend
			   it to whole number of limbs, avoid merging those
			   with bit-field stores.  Don't allow it for
			   shifts etc. either, so that the bit-field store
			   handling doesn't have to be done everywhere.  */
			if (TREE_CODE (lhs) == COMPONENT_REF
			    && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
			  break;
			continue;
		      }
		    break;
		  default:
		    break;
		  }
	    }
	  /* Also ignore uninitialized uses.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (s)
	      && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
	    continue;

	  if (!large_huge.m_names)
	    large_huge.m_names = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
	  if (has_single_use (s))
	    {
	      if (!large_huge.m_single_use_names)
		large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (large_huge.m_single_use_names,
			      SSA_NAME_VERSION (s));
	    }
	  if (SSA_NAME_VAR (s)
	      && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
		   && SSA_NAME_IS_DEFAULT_DEF (s))
		  || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
	    has_large_huge_parm_result = true;
	  if (optimize
	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
	      && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
	      && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
	    {
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool optimizable_load = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, s)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (gimple_code (use_stmt) == GIMPLE_PHI
		      || is_gimple_call (use_stmt)
		      || gimple_code (use_stmt) == GIMPLE_ASM
		      || (is_gimple_assign (use_stmt)
			  && (gimple_assign_rhs_code (use_stmt)
			      == COMPLEX_EXPR)))
		    {
		      optimizable_load = false;
		      break;
		    }
		}

	      ssa_op_iter oi;
	      FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
					oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
		    {
		      optimizable_load = false;
		      break;
		    }
		}

	      if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
		{
		  if (!large_huge.m_loads)
		    large_huge.m_loads = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
		}
	    }
	}
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
	 into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_store_p (g))
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
		has_large_huge = true;
	    }
	}
    }
  if (large_huge.m_names || has_large_huge)
    {
      ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
      calculate_dominance_info (CDI_DOMINATORS);
      if (optimize)
	enable_ranger (cfun);
      if (large_huge.m_loads)
	{
	  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
	  bitint_dom_walker (large_huge.m_names,
			     large_huge.m_loads).walk (entry);
	  bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
	  clear_aux_for_blocks ();
	  BITMAP_FREE (large_huge.m_loads);
	}
      large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
      large_huge.m_limb_size
	= tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
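      /* E.g. on x86_64 the limb is DImode, so m_limb_type is a 64-bit
	 unsigned type and m_limb_size is 8; a _BitInt(256) then occupies
	 4 limbs (illustration only, the limb is target dependent).  */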
      if (large_huge.m_names)
	{
	  large_huge.m_map
	    = init_var_map (num_ssa_names, NULL, large_huge.m_names);
	  coalesce_ssa_name (large_huge.m_map);
	  partition_view_normal (large_huge.m_map);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "After Coalescing:\n");
	      dump_var_map (dump_file, large_huge.m_map);
	    }
	  large_huge.m_vars
	    = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
	  bitmap_iterator bi;
	  if (has_large_huge_parm_result)
	    EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
	      {
		tree s = ssa_name (i);
		if (SSA_NAME_VAR (s)
		    && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
			 && SSA_NAME_IS_DEFAULT_DEF (s))
			|| TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
		  {
		    int p = var_to_partition (large_huge.m_map, s);
		    if (large_huge.m_vars[p] == NULL_TREE)
		      {
			large_huge.m_vars[p] = SSA_NAME_VAR (s);
			mark_addressable (SSA_NAME_VAR (s));
		      }
		  }
	      }
	  tree atype = NULL_TREE;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
	  EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
	    {
	      tree s = ssa_name (i);
	      int p = var_to_partition (large_huge.m_map, s);
	      if (large_huge.m_vars[p] == NULL_TREE)
		{
		  if (atype == NULL_TREE
		      || !tree_int_cst_equal (TYPE_SIZE (atype),
					      TYPE_SIZE (TREE_TYPE (s))))
		    {
		      unsigned HOST_WIDE_INT nelts
			= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
		      atype = build_array_type_nelts (large_huge.m_limb_type,
						      nelts);
		    }
		  large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
		  mark_addressable (large_huge.m_vars[p]);
		}
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  print_generic_expr (dump_file, s, TDF_SLIM);
		  fprintf (dump_file, " -> ");
		  print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	    }
	}
      FOR_EACH_BB_REVERSE_FN (bb, cfun)
	{
	  gimple_stmt_iterator prev;
	  for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
	       gsi = prev)
	    {
	      prev = gsi;
	      gsi_prev (&prev);
	      ssa_op_iter iter;
	      gimple *stmt = gsi_stmt (gsi);
	      if (is_gimple_debug (stmt))
		continue;
	      bitint_prec_kind kind = bitint_prec_small;
	      tree t;
	      FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
		if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
		  {
		    bitint_prec_kind this_kind
		      = bitint_precision_kind (TREE_TYPE (t));
		    kind = MAX (kind, this_kind);
		  }
	      if (is_gimple_assign (stmt) && gimple_store_p (stmt))
		{
		  t = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
		    {
		      bitint_prec_kind this_kind
			= bitint_precision_kind (TREE_TYPE (t));
		      kind = MAX (kind, this_kind);
		    }
		}
	      if (is_gimple_assign (stmt)
		  && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
		{
		  t = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		      && TREE_CODE (t) == INTEGER_CST)
		    {
		      bitint_prec_kind this_kind
			= bitint_precision_kind (TREE_TYPE (t));
		      kind = MAX (kind, this_kind);
		    }
		}
	      if (is_gimple_call (stmt))
		{
		  t = gimple_call_lhs (stmt);
		  if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
		    {
		      bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
		      kind = MAX (kind, this_kind);
		      if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
			{
			  this_kind
			    = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
			  kind = MAX (kind, this_kind);
			}
		    }
		}
	      if (kind == bitint_prec_small)
		continue;
	      switch (gimple_code (stmt))
		{
		case GIMPLE_CALL:
		  /* For now.  We'll need to handle some internal functions and
		     perhaps some builtins.  */
		  if (kind == bitint_prec_middle)
		    continue;
		  break;
		case GIMPLE_ASM:
		  if (kind == bitint_prec_middle)
		    continue;
		  break;
		case GIMPLE_ASSIGN:
		  if (gimple_clobber_p (stmt))
		    continue;
		  if (kind >= bitint_prec_large)
		    {
		      if (gimple_assign_single_p (stmt))
			/* No need to lower copies, loads or stores.  */
			continue;
		      if (gimple_assign_cast_p (stmt))
			{
			  tree lhs = gimple_assign_lhs (stmt);
			  tree rhs = gimple_assign_rhs1 (stmt);
			  if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
			      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
			      && (TYPE_PRECISION (TREE_TYPE (lhs))
				  == TYPE_PRECISION (TREE_TYPE (rhs))))
			    /* No need to lower casts to same precision.  */
			    continue;
			}
		    }
		  break;
		default:
		  break;
		}
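	      /* Sketch for the middle kind handling below (illustration
		 only): a middle
		   _BitInt(65) c = a + b;
		 is rewritten via casts to a 65-bit INTEGER_TYPE, i.e.
		   _1 = (<unnamed-signed:65>) a_2;
		   _3 = (<unnamed-signed:65>) b_4;
		   _5 = _1 + _3;
		   c_6 = (_BitInt(65)) _5;
		 and the INTEGER_TYPE arithmetic is then expanded normally
		 (e.g. in TImode on x86_64).  */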
	      if (kind == bitint_prec_middle)
		{
		  tree type = NULL_TREE;
		  /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
		     with the same precision and back.  */
		  unsigned int nops = gimple_num_ops (stmt);
		  for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
		       i < nops; ++i)
		    if (tree op = gimple_op (stmt, i))
		      {
			tree nop = maybe_cast_middle_bitint (&gsi, op, type);
			if (nop != op)
			  gimple_set_op (stmt, i, nop);
			else if (COMPARISON_CLASS_P (op))
			  {
			    TREE_OPERAND (op, 0)
			      = maybe_cast_middle_bitint (&gsi,
							  TREE_OPERAND (op, 0),
							  type);
			    TREE_OPERAND (op, 1)
			      = maybe_cast_middle_bitint (&gsi,
							  TREE_OPERAND (op, 1),
							  type);
			  }
			else if (TREE_CODE (op) == CASE_LABEL_EXPR)
			  {
			    CASE_LOW (op)
			      = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
							  type);
			    CASE_HIGH (op)
			      = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
							  type);
			  }
		      }
		  if (tree lhs = gimple_get_lhs (stmt))
		    if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
			&& (bitint_precision_kind (TREE_TYPE (lhs))
			    == bitint_prec_middle))
		      {
			int prec = TYPE_PRECISION (TREE_TYPE (lhs));
			int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
			type = build_nonstandard_integer_type (prec, uns);
			tree lhs2 = make_ssa_name (type);
			gimple_set_lhs (stmt, lhs2);
			gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
			if (stmt_ends_bb_p (stmt))
			  {
			    edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
			    gsi_insert_on_edge (e, g);
			    edge_insertions = true;
			  }
			else
			  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
		      }
		  update_stmt (stmt);
		  continue;
		}

	      if (tree lhs = gimple_get_lhs (stmt))
		if (TREE_CODE (lhs) == SSA_NAME)
		  {
		    tree type = TREE_TYPE (lhs);
		    if (TREE_CODE (type) == COMPLEX_TYPE)
		      type = TREE_TYPE (type);
		    if (TREE_CODE (type) == BITINT_TYPE
			&& bitint_precision_kind (type) >= bitint_prec_large
			&& (large_huge.m_names == NULL
			    || !bitmap_bit_p (large_huge.m_names,
					      SSA_NAME_VERSION (lhs))))
		      continue;
		  }

	      large_huge.lower_stmt (stmt);
	    }
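	  /* Sketch (illustration only): a PHI like
	       x_1 = PHI <0 (bb2), y_4 (bb3)>
	     where x_1 and y_4 got backing arrays bitint.0 and bitint.1 is
	     realized below by inserting copies on the incoming edges,
	     roughly
	       on the bb2 edge:  bitint.0 = {};
	       on the bb3 edge:  bitint.0 = VIEW_CONVERT_EXPR copy of bitint.1;
	     with nonzero constants possibly emitted into .rodata in a
	     shortened form and extended by a memset or zero store.  */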
	  tree atype = NULL_TREE;
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *phi = gsi.phi ();
	      tree lhs = gimple_phi_result (phi);
	      if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
		  || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
		continue;
	      int p1 = var_to_partition (large_huge.m_map, lhs);
	      gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
	      tree v1 = large_huge.m_vars[p1];
	      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
		{
		  tree arg = gimple_phi_arg_def (phi, i);
		  edge e = gimple_phi_arg_edge (phi, i);
		  gimple *g;
		  switch (TREE_CODE (arg))
		    {
		    case INTEGER_CST:
		      if (integer_zerop (arg) && VAR_P (v1))
			{
			  tree zero = build_zero_cst (TREE_TYPE (v1));
			  g = gimple_build_assign (v1, zero);
			  gsi_insert_on_edge (e, g);
			  edge_insertions = true;
			  break;
			}
		      {
			unsigned int min_prec, prec, rem;
			int ext;
			tree c;
			prec = TYPE_PRECISION (TREE_TYPE (arg));
			rem = prec % (2 * limb_prec);
			min_prec = bitint_min_cst_precision (arg, ext);
			if (min_prec > prec - rem - 2 * limb_prec
			    && min_prec > (unsigned) limb_prec)
			  /* Constant which has enough significant bits that it
			     isn't worth trying to save .rodata space by extending
			     from smaller number.  */
			  min_prec = prec;
			else
			  min_prec = CEIL (min_prec, limb_prec) * limb_prec;
			if (min_prec == 0)
			  c = NULL_TREE;
			else if (min_prec == prec)
			  c = tree_output_constant_def (arg);
			else if (min_prec == (unsigned) limb_prec)
			  c = fold_convert (large_huge.m_limb_type, arg);
			else
			  {
			    tree ctype = build_bitint_type (min_prec, 1);
			    c = tree_output_constant_def (fold_convert (ctype,
									arg));
			  }
			if (c)
			  {
			    if (VAR_P (v1) && min_prec == prec)
			      {
				tree v2 = build1 (VIEW_CONVERT_EXPR,
						  TREE_TYPE (v1), c);
				g = gimple_build_assign (v1, v2);
				gsi_insert_on_edge (e, g);
				edge_insertions = true;
				break;
			      }
			    if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
			      g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							       TREE_TYPE (c), v1),
						       c);
			    else
			      {
				unsigned HOST_WIDE_INT nelts
				  = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
				    / limb_prec;
				tree vtype
				  = build_array_type_nelts (large_huge.m_limb_type,
							    nelts);
				g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
								 vtype, v1),
							 build1 (VIEW_CONVERT_EXPR,
								 vtype, c));
			      }
			    gsi_insert_on_edge (e, g);
			  }
			if (ext == 0)
			  {
			    unsigned HOST_WIDE_INT nelts
			      = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
				 - min_prec) / limb_prec;
			    tree vtype
			      = build_array_type_nelts (large_huge.m_limb_type,
							nelts);
			    tree ptype = build_pointer_type (TREE_TYPE (v1));
			    tree off;
			    if (c)
			      off = fold_convert (ptype,
						  TYPE_SIZE_UNIT (TREE_TYPE (c)));
			    else
			      off = build_zero_cst (ptype);
			    tree vd = build2 (MEM_REF, vtype,
					      build_fold_addr_expr (v1), off);
			    g = gimple_build_assign (vd, build_zero_cst (vtype));
			  }
			else
			  {
			    tree vd = v1;
			    if (c)
			      {
				tree ptype = build_pointer_type (TREE_TYPE (v1));
				tree off
				  = fold_convert (ptype,
						  TYPE_SIZE_UNIT (TREE_TYPE (c)));
				vd = build2 (MEM_REF, large_huge.m_limb_type,
					     build_fold_addr_expr (v1), off);
			      }
			    vd = build_fold_addr_expr (vd);
			    unsigned HOST_WIDE_INT nbytes
			      = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
			    if (c)
			      nbytes
				-= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
			    tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
			    g = gimple_build_call (fn, 3, vd,
						   integer_minus_one_node,
						   build_int_cst (sizetype,
								  nbytes));
			  }
			gsi_insert_on_edge (e, g);
			edge_insertions = true;
		      }
		      break;
		    case SSA_NAME:
		      if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
			{
			  if (large_huge.m_names == NULL
			      || !bitmap_bit_p (large_huge.m_names,
						SSA_NAME_VERSION (arg)))
			    continue;
			}
		      {
			int p2 = var_to_partition (large_huge.m_map, arg);
			if (p1 == p2)
			  continue;
			gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
			tree v2 = large_huge.m_vars[p2];
			if (VAR_P (v1) && VAR_P (v2))
			  g = gimple_build_assign (v1, v2);
			else if (VAR_P (v1))
			  g = gimple_build_assign (v1,
						   build1 (VIEW_CONVERT_EXPR,
							   TREE_TYPE (v1), v2));
			else if (VAR_P (v2))
			  g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							   TREE_TYPE (v2), v1),
						   v2);
			else
			  {
			    if (atype == NULL_TREE
				|| !tree_int_cst_equal (TYPE_SIZE (atype),
							TYPE_SIZE (TREE_TYPE (lhs))))
			      {
				unsigned HOST_WIDE_INT nelts
				  = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
				    / limb_prec;
				atype
				  = build_array_type_nelts (large_huge.m_limb_type,
							    nelts);
			      }
			    g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							     atype, v1),
						     build1 (VIEW_CONVERT_EXPR,
							     atype, v2));
			  }
			gsi_insert_on_edge (e, g);
			edge_insertions = true;
		      }
		      break;
		    default:
		      gcc_unreachable ();
		    }
		}
	    }
	}
    }
  if (large_huge.m_names || has_large_huge)
    {
      for (i = 0; i < num_ssa_names; ++i)
	{
	  tree s = ssa_name (i);
	  if (s == NULL)
	    continue;
	  tree type = TREE_TYPE (s);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (large_huge.m_preserved
		  && bitmap_bit_p (large_huge.m_preserved,
				   SSA_NAME_VERSION (s)))
		continue;
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      if (gimple_code (g) == GIMPLE_NOP)
		{
		  if (SSA_NAME_VAR (s))
		    set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
		  release_ssa_name (s);
		  continue;
		}
	      if (gimple_bb (g) == NULL)
		{
		  release_ssa_name (s);
		  continue;
		}
	      if (gimple_code (g) != GIMPLE_ASM)
		{
		  gimple_stmt_iterator gsi = gsi_for_stmt (g);
		  bool save_vta = flag_var_tracking_assignments;
		  flag_var_tracking_assignments = false;
		  gsi_remove (&gsi, true);
		  flag_var_tracking_assignments = save_vta;
		}
	      gimple *nop = gimple_build_nop ();
	      SSA_NAME_DEF_STMT (s) = nop;
	      release_ssa_name (s);
	    }
	}
      if (optimize)
	disable_ranger (cfun);
    }
  if (edge_insertions)
    gsi_commit_edge_inserts ();
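  /* Sketch (illustration only): for a call such as
       foo (_setjmp (&buf), big_bitint_arg)
     the statements materializing the lowered argument were temporarily
     emitted just before the returns-twice call; below they are moved
     into the preceding block and every moved lhs gets a PHI in the
     call's block merging the computed value on the normal edge with an
     undefined default definition on the abnormal edge.  */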
  /* Fix up arguments of ECF_RETURNS_TWICE calls.  Those were temporarily
     inserted before the call, but that is invalid IL, so move them to the
     right place and add corresponding PHIs.  */
  if (!large_huge.m_returns_twice_calls.is_empty ())
    {
      auto_vec<gimple *, 16> arg_stmts;
      while (!large_huge.m_returns_twice_calls.is_empty ())
	{
	  gimple *stmt = large_huge.m_returns_twice_calls.pop ();
	  gimple_stmt_iterator gsi = gsi_after_labels (gimple_bb (stmt));
	  while (gsi_stmt (gsi) != stmt)
	    {
	      if (is_gimple_debug (gsi_stmt (gsi)))
		gsi_next (&gsi);
	      else
		{
		  arg_stmts.safe_push (gsi_stmt (gsi));
		  gsi_remove (&gsi, false);
		}
	    }
	  gimple *g;
	  basic_block bb = NULL;
	  edge e = NULL, ead = NULL;
	  FOR_EACH_VEC_ELT (arg_stmts, i, g)
	    {
	      gsi_safe_insert_before (&gsi, g);
	      if (bb == NULL)
		{
		  bb = gimple_bb (stmt);
		  gcc_checking_assert (EDGE_COUNT (bb->preds) == 2);
		  e = EDGE_PRED (bb, 0);
		  ead = EDGE_PRED (bb, 1);
		  if ((ead->flags & EDGE_ABNORMAL) == 0)
		    std::swap (e, ead);
		  gcc_checking_assert ((e->flags & EDGE_ABNORMAL) == 0
				       && (ead->flags & EDGE_ABNORMAL));
		}
	      tree lhs = gimple_assign_lhs (g);
	      tree arg = lhs;
	      gphi *phi = create_phi_node (copy_ssa_name (arg), bb);
	      add_phi_arg (phi, arg, e, UNKNOWN_LOCATION);
	      tree var = create_tmp_reg (TREE_TYPE (arg));
	      suppress_warning (var, OPT_Wuninitialized);
	      arg = get_or_create_ssa_default_def (cfun, var);
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
	      add_phi_arg (phi, arg, ead, UNKNOWN_LOCATION);
	      arg = gimple_phi_result (phi);
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
	      imm_use_iterator iter;
	      gimple *use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		{
		  if (use_stmt == phi)
		    continue;
		  gcc_checking_assert (use_stmt == stmt);
		  use_operand_p use_p;
		  FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		    SET_USE (use_p, arg);
		}
	    }
	  update_stmt (stmt);
	  arg_stmts.truncate (0);
	}
    }

  return ret;
}
namespace {

const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint

} // anon namespace
gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}


namespace {

const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}