/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "stor-layout.h"
#include "gimple-lower-bitint.h"

/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code
   and finally huge _BitInt which should be handled by loops over the
   limbs.  */

enum bitint_prec_kind
{
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;

/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (large_min_prec && prec <= large_min_prec)
    return bitint_prec_large;
  return bitint_prec_huge;
}
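
/* Illustrative example only, assuming an x86_64-like configuration with
   a 64-bit (DImode) limb and 128-bit MAX_FIXED_MODE_SIZE:
     bitint_precision_kind (32)  -> bitint_prec_small   (single limb)
     bitint_precision_kind (100) -> bitint_prec_middle  (TImode arithmetic)
   while wider precisions are categorized as bitint_prec_large resp.
   bitint_prec_huge, the latter lowered through loops over the limbs.  */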

/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
	 in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
	{
	  min_prec = min_prec2;
	  ext = -1;
	}
    }
  return min_prec;
}
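
/* For instance, for a signed _BitInt(256) constant of -1 this returns 0
   with EXT set to -1 (every limb is just the sign extension), while for
   a constant of 5 it returns 3 with EXT set to 0.  */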

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

static tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
	return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
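
/* E.g. a middle _BitInt(100) operand (on the x86_64-like configuration
   sketched above) is replaced here by a NOP_EXPR cast to a 100-bit type
   from build_nonstandard_integer_type, so subsequent arithmetic on it
   can stay scalar.  */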

/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

static bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
    case BIT_FIELD_REF:
      return true;
    case LSHIFT_EXPR:
      {
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
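
/* E.g. in
     _BitInt(512) a, b, c, d;
     d = (a + b) ^ c;
   both the PLUS_EXPR and the BIT_XOR_EXPR are mergeable, so the whole
   right-hand side can be evaluated limb by limb in a single loop.  */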

/* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

static int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return 0;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
	  || gimple_bb (use_stmt) != gimple_bb (stmt)
	  || !gimple_store_p (use_stmt)
	  || !is_gimple_assign (use_stmt)
	  || gimple_has_volatile_ops (use_stmt)
	  || stmt_ends_bb_p (use_stmt))
	return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  gimple *realpart = NULL, *cast = NULL;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
	continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
	return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
	{
	  if ((seen & 1) != 0)
	    return 0;
	  seen |= 1;
	  realpart = g;
	}
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
	{
	  if ((seen & 2) != 0)
	    return 0;
	  seen |= 2;

	  use_operand_p use2_p;
	  gimple *use_stmt;
	  tree lhs2 = gimple_assign_lhs (g);
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
	    return 0;
	  if (!single_imm_use (lhs2, &use2_p, &use_stmt)
	      || gimple_bb (use_stmt) != gimple_bb (stmt)
	      || !gimple_assign_cast_p (use_stmt))
	    return 0;

	  lhs2 = gimple_assign_lhs (use_stmt);
	  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
	      || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
	    return 0;
	  cast = use_stmt;
	}
      else
	return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  if (seen == 3)
    {
      /* Punt if the cast stmt appears before realpart stmt, because
	 if both appear, the lowering wants to emit all the code
	 at the location of realpart stmt.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
      unsigned int cnt = 0;
      do
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
	    return 0;
	  if (gsi_stmt (gsi) == stmt)
	    break;
	  /* If realpart is too far from stmt, punt as well.
	     Usually it will appear right after it.  */
	  if (++cnt == 32)
	    return 0;
	}
      while (1);
    }
  return seen == 3 ? 2 : 1;
}
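
/* The typical matched shape is thus the expansion of
     if (__builtin_add_overflow (a, b, &r)) ...
   i.e. (sketch)
     _1 = .ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <_1>;      // optional, makes the result 2
     _2 = IMAGPART_EXPR <_1>;
     _3 = (int) _2;               // single non-_BitInt cast use
   all in one basic block.  */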

/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

static tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  || TREE_CODE_CLASS (code) == tcc_binary)
	op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}
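
/* This matches e.g. both
     if (x < y) ...               // GIMPLE_COND
   and
     _1 = x <= y;                 // comparison assignment
   when x and y are large/huge _BitInt, returning LT_EXPR resp. LE_EXPR.  */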

/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL),
      m_returns_twice_calls (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  tree build_bit_field_ref (tree, tree, unsigned HOST_WIDE_INT,
			    unsigned HOST_WIDE_INT);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_bit_field_ref (tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true, for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
  vec<gimple *> m_returns_twice_calls;
};
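
/* Sketch of the m_first/m_data protocol described above: a handler
   that needs per-operand state does on the first call
     m_data.safe_push (in); m_data.safe_push (out); m_data_cnt += 2;
   and on the following calls for other limbs of the same operand
     tree t = m_data[m_data_cnt]; ...; m_data_cnt += 2;
   i.e. m_data_cnt must advance identically whether or not m_first
   is set.  */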

bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
  m_returns_twice_calls.release ();
}

/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}

/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}

/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  tree ltype = m_limb_type;
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
  if (as != TYPE_ADDR_SPACE (ltype))
    ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				  | ENCODE_QUAL_ADDR_SPACE (as));
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, ltype,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
	= build2 (MEM_REF, ltype, unshare_expr (TREE_OPERAND (var, 0)),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     tree_to_uhwi (idx)
					     * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))), limb_prec);
	  tree atype = build_array_type_nelts (ltype, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}

/* Build a BIT_FIELD_REF to access BITSIZE bits with FTYPE type at
   offset BITPOS inside of OBJ.  */

tree
bitint_large_huge::build_bit_field_ref (tree ftype, tree obj,
					unsigned HOST_WIDE_INT bitsize,
					unsigned HOST_WIDE_INT bitpos)
{
  if (INTEGRAL_TYPE_P (TREE_TYPE (obj))
      && !type_has_mode_precision_p (TREE_TYPE (obj)))
    {
      unsigned HOST_WIDE_INT nelts
	= CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))), limb_prec);
      tree ltype = m_limb_type;
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj));
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      tree atype = build_array_type_nelts (ltype, nelts);
      obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
    }
  return build3 (BIT_FIELD_REF, ftype, obj, bitsize_int (bitsize),
		 bitsize_int (bitpos));
}

/* Emit a half diamond,
   i.e. if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}

/* Emit a full diamond,
   i.e. if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}

/* Emit a half diamond with full diamond in it,
   i.e. if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}

/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree idx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
	      insert_before (g);
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	  if (m_data[m_data_cnt + 1] == NULL_TREE)
	    {
	      t = limb_access (TREE_TYPE (op), t, idx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	      insert_before (g);
	      t = gimple_assign_lhs (g);
	    }
	}
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else
	{
	  t = m_data[m_data_cnt + 1];
	  if (m_data[m_data_cnt + 1] == integer_type_node)
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	      unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	      int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	      tree c = m_data[m_data_cnt];
	      unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	      g = gimple_build_cond (LT_EXPR, idx,
				     size_int (min_prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      if (min_prec > (unsigned) limb_prec)
		{
		  c = limb_access (TREE_TYPE (op), c, idx, false);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
		  insert_before (g);
		  c = gimple_assign_lhs (g);
		}
	      tree c2 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      t = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (t, edge_true->dest);
	      add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	    }
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
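
/* As an illustration of the INTEGER_CST handling above, a constant 5
   used as a _BitInt(512) operand in an upwards 2-limb loop becomes a
   PHI whose preheader argument is the limb 5 and whose other argument
   is the extension 0, so no .rodata array is needed for it at all.  */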

/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}

/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}

/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
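
/* On targets providing the uaddc5/usubc5 optabs the per-limb addition
   above is thus emitted as (sketch)
     _1 = .UADDC (limb1, limb2, carry_in);
     sum = REALPART_EXPR <_1>;
     carry_out = IMAGPART_EXPR <_1>;
   and otherwise as one or two .ADD_OVERFLOW calls whose IMAGPART_EXPRs
   are combined into the carry.  */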

/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
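
/* I.e. for a shift count CNT in [1, limb_prec - 1] each result limb is
   computed as (sketch)
     lhs = (rhs1 << CNT) | (prev_limb >> (limb_prec - CNT));
   with the previous limb carried between the calls via m_data.  */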

/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
	  /* If lhs has bigger precision than rhs, we can use
	     the simple case only if there is a guarantee that
	     the most significant limb is handled in straight
	     line code.  If m_var_msb (on left shifts) or
	     if m_upwards_2limb * limb_prec is equal to
	     lhs precision or if not m_upwards_2limb and lhs_type
	     has precision which is multiple of limb_prec that is
	     not the case.  */
	  || (!m_var_msb
	      && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		  == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
	      && ((!m_upwards_2limb
		   && (TYPE_PRECISION (lhs_type) % limb_prec != 0))
		  || (m_upwards_2limb
		      && (m_upwards_2limb * limb_prec
			  < TYPE_PRECISION (lhs_type))))))
	{
	  rhs1 = handle_operand (rhs1, idx);
	  if (tree_fits_uhwi_p (idx))
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      if (!types_compatible_p (type, TREE_TYPE (rhs1)))
		rhs1 = add_cast (type, rhs1);
	    }
	  return rhs1;
	}
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
		      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= than this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
	{
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  if (TYPE_UNSIGNED (rhs_type))
	    /* No need to keep state between iterations.  */
	    ;
	  else if (m_upwards && !m_upwards_2limb)
	    /* We need to keep state between iterations, but
	       not within any loop, everything is straight line
	       code with only increasing indexes.  */
	    ;
	  else if (!m_upwards_2limb)
	    {
	      unsigned save_data_cnt = m_data_cnt;
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      m_data_cnt = save_data_cnt + 3;
	      t = handle_operand (rhs1, size_int (low));
	      m_first = false;
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	      m_data_cnt = save_data_cnt;
	      t = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (t));
	      g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
	      insert_before (g);
	      m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	    }
	  else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
	    /* We need to keep state between iterations, but
	       fortunately not within the loop, only afterwards.  */
	    ;
	  else
	    {
	      tree out;
	      m_data.truncate (m_data_cnt);
	      prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
	{
	  if (m_upwards_2limb
	      && low >= m_upwards_2limb - m_first)
	    {
	      rhs1 = handle_operand (rhs1, idx);
	      if (m_first)
		m_data[save_data_cnt + 2]
		  = build_int_cst (NULL_TREE, m_data_cnt);
	      m_first = save_first;
	      return rhs1;
	    }
	  bool single_comparison
	    = low == high || (m_upwards_2limb && (low & 1) == m_first);
	  tree idxc = idx;
	  if (!single_comparison
	      && m_upwards_2limb
	      && !m_first
	      && low + 1 == m_upwards_2limb)
	    /* In this case we know that idx <= low always,
	       so effectively we just needs a single comparison,
	       idx < low or idx == low, but we'd need to emit different
	       code for the 2 branches than single_comparison normally
	       emits.  So, instead of special-casing that, emit a
	       low <= low comparison which cfg cleanup will clean up
	       at the end of the pass.  */
	    idxc = size_int (low);
	  g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
				 idxc, size_int (low), NULL_TREE, NULL_TREE);
	  edge edge_true_true, edge_true_false, edge_false;
	  if_then_if_then_else (g, (single_comparison ? NULL
				    : gimple_build_cond (EQ_EXPR, idx,
							 size_int (low),
							 NULL_TREE,
							 NULL_TREE)),
				profile_probability::likely (),
				profile_probability::unlikely (),
				edge_true_true, edge_true_false, edge_false);
	  bool save_cast_conditional = m_cast_conditional;
	  m_cast_conditional = true;
	  tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  tree ext = NULL_TREE;
	  tree bitfld = NULL_TREE;
	  if (!single_comparison)
	    {
	      m_gsi = gsi_after_labels (edge_true_true->src);
	      m_first = false;
	      m_data_cnt = save_data_cnt + 3;
	      if (m_bitfld_load)
		{
		  bitfld = m_data[m_bitfld_load];
		  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
		}
	      t2 = handle_operand (rhs1, size_int (low));
	      if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
		t2 = add_cast (m_limb_type, t2);
	      if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
		{
		  ext = add_cast (signed_type_for (m_limb_type), t2);
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  tree n = make_ssa_name (TREE_TYPE (ext));
		  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
		  insert_before (g);
		  ext = add_cast (m_limb_type, n);
		}
	    }
	  tree t3;
	  if (TYPE_UNSIGNED (rhs_type))
	    t3 = build_zero_cst (m_limb_type);
	  else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
	    t3 = m_data[save_data_cnt];
	  else
	    t3 = m_data[save_data_cnt + 1];
	  m_gsi = gsi_after_labels (edge_true_false->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true_false->dest);
	  add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
	  if (edge_true_true)
	    add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
	  if (ext)
	    {
	      tree t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[save_data_cnt], edge_false,
			   UNKNOWN_LOCATION);
	      add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
	      if (!save_cast_conditional)
		{
		  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
		  insert_before (g);
		}
	      else
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = (e2 ? m_data[save_data_cnt + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	    }
	  if (m_bitfld_load)
	    {
	      tree t4;
	      if (!save_first && !save_cast_conditional)
		t4 = m_data[m_bitfld_load + 1];
	      else
		t4 = make_ssa_name (m_limb_type);
	      phi = create_phi_node (t4, edge_true_false->dest);
	      add_phi_arg (phi,
			   edge_true_true ? bitfld : m_data[m_bitfld_load],
			   edge_true_false, UNKNOWN_LOCATION);
	      add_phi_arg (phi, m_data[m_bitfld_load + 2],
			   edge_false, UNKNOWN_LOCATION);
	      if (edge_true_true)
		add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
			     UNKNOWN_LOCATION);
	      if (save_cast_conditional)
		for (basic_block bb = gsi_bb (m_gsi);;)
		  {
		    edge e1 = single_succ_edge (bb);
		    edge e2 = find_edge (e1->dest, m_bb), e3;
		    tree t5 = ((e2 && !save_first) ? m_data[m_bitfld_load + 1]
			       : make_ssa_name (m_limb_type));
		    phi = create_phi_node (t5, e1->dest);
		    edge_iterator ei;
		    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
		      add_phi_arg (phi, (e3 == e1 ? t4
					 : build_zero_cst (m_limb_type)),
				   e3, UNKNOWN_LOCATION);
		    if (e2)
		      break;
		    t4 = t5;
		    bb = e1->dest;
		  }
	      m_data[m_bitfld_load] = t4;
	      m_data[m_bitfld_load + 2] = t4;
	      m_bitfld_load = 0;
	    }
	  m_cast_conditional = save_cast_conditional;
	  m_first = save_first;
	  return t;
	}
      if (tree_to_uhwi (idx) < low)
	{
	  t = handle_operand (rhs1, idx);
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	}
      else if (tree_to_uhwi (idx) < high)
	{
	  t = handle_operand (rhs1, size_int (low));
	  if (m_first)
	    m_data[save_data_cnt + 2]
	      = build_int_cst (NULL_TREE, m_data_cnt);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
	    t = add_cast (m_limb_type, t);
	  tree ext = NULL_TREE;
	  if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
	    {
	      ext = add_cast (signed_type_for (m_limb_type), t);
	      tree lpm1 = build_int_cst (unsigned_type_node,
					 limb_prec - 1);
	      tree n = make_ssa_name (TREE_TYPE (ext));
	      g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	      insert_before (g);
	      ext = add_cast (m_limb_type, n);
	      m_data[save_data_cnt + 1] = ext;
	    }
	}
      else
	{
	  if (TYPE_UNSIGNED (rhs_type) && m_first)
	    {
	      handle_operand (rhs1, size_zero_node);
	      m_data[save_data_cnt + 2]
		= build_int_cst (NULL_TREE, m_data_cnt);
	    }
	  else
	    m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
	  if (TYPE_UNSIGNED (rhs_type))
	    t = build_zero_cst (m_limb_type);
	  else if (m_bb && m_data[save_data_cnt])
	    t = m_data[save_data_cnt];
	  else
	    t = m_data[save_data_cnt + 1];
	}
      tree type = limb_access_type (lhs_type, idx);
      if (!useless_type_conversion_p (type, m_limb_type))
	t = add_cast (type, t);
      m_first = save_first;
      return t;
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) >= bitint_prec_large
	   && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
	{
	  gimple_stmt_iterator save_gsi = m_gsi;
	  m_gsi = m_init_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  else
	    gsi_next (&m_gsi);
	  if (TREE_CODE (rhs_type) == BITINT_TYPE
	      && bitint_precision_kind (rhs_type) == bitint_prec_middle)
	    {
	      tree type = NULL_TREE;
	      rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
	      rhs_type = TREE_TYPE (rhs1);
	    }
	  r1 = rhs1;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    r1 = add_cast (m_limb_type, rhs1);
	  if (TYPE_PRECISION (rhs_type) > limb_prec)
	    {
	      g = gimple_build_assign (make_ssa_name (rhs_type),
				       RSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
						      limb_prec));
	      insert_before (g);
	      r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  if (TYPE_UNSIGNED (rhs_type))
	    rext = build_zero_cst (m_limb_type);
	  else
	    {
	      rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
				       RSHIFT_EXPR, rext,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      rext = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  m_init_gsi = m_gsi;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	  else
	    gsi_prev (&m_init_gsi);
	  m_gsi = save_gsi;
	}
      tree t;
      if (m_upwards_2limb)
	{
	  if (m_first)
	    {
	      tree out1, out2;
	      prepare_data_in_out (r1, idx, &out1, rext);
	      if (TYPE_PRECISION (rhs_type) > limb_prec)
		{
		  prepare_data_in_out (r2, idx, &out2, rext);
		  m_data.pop ();
		  t = m_data.pop ();
		  m_data[m_data_cnt + 1] = t;
		}
	      else
		m_data[m_data_cnt + 1] = rext;
	      m_data.safe_push (rext);
	      t = m_data[m_data_cnt];
	    }
	  else if (!tree_fits_uhwi_p (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    {
	      tree type = limb_access_type (lhs_type, idx);
	      t = m_data[m_data_cnt + 2];
	      if (!useless_type_conversion_p (type, m_limb_type))
		t = add_cast (type, t);
	    }
	  m_data_cnt += 3;
	  return t;
	}
      else if (m_first)
	{
	  m_data.safe_push (r1);
	  m_data.safe_push (r2);
	  m_data.safe_push (rext);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree type = limb_access_type (lhs_type, idx);
	  if (integer_zerop (idx))
	    t = m_data[m_data_cnt];
	  else if (TYPE_PRECISION (rhs_type) > limb_prec
		   && integer_onep (idx))
	    t = m_data[m_data_cnt + 1];
	  else
	    t = m_data[m_data_cnt + 2];
	  if (!useless_type_conversion_p (type, m_limb_type))
	    t = add_cast (type, t);
	  m_data_cnt += 3;
	  return t;
	}
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			     NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
	{
	  g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e5 = split_block (gsi_bb (m_gsi), g);
	  e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
	  e2 = find_edge (e5->dest, e2->dest);
	  e4->probability = profile_probability::unlikely ();
	  e5->flags = EDGE_FALSE_VALUE;
	  e5->probability = e4->probability.invert ();
	}
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
	add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}

/* Helper function for handle_stmt method, handle a BIT_FIELD_REF.  */

tree
bitint_large_huge::handle_bit_field_ref (tree op, tree idx)
{
  if (tree_fits_uhwi_p (idx))
    {
      if (m_first)
	m_data.safe_push (NULL);
      ++m_data_cnt;
      unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (m_limb_type));
      tree bfr = build3 (BIT_FIELD_REF, m_limb_type,
			 TREE_OPERAND (op, 0),
			 TYPE_SIZE (m_limb_type),
			 size_binop (PLUS_EXPR, TREE_OPERAND (op, 2),
				     bitsize_int (tree_to_uhwi (idx) * sz)));
      tree r = make_ssa_name (m_limb_type);
      gimple *g = gimple_build_assign (r, bfr);
      insert_before (g);
      tree type = limb_access_type (TREE_TYPE (op), idx);
      if (!useless_type_conversion_p (type, m_limb_type))
	r = add_cast (type, r);
      return r;
    }
  tree var;
  if (m_first)
    {
      unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op)));
      machine_mode mode;
      tree type, bfr;
      if (bitwise_mode_for_size (sz).exists (&mode)
	  && known_eq (GET_MODE_BITSIZE (mode), sz))
	type = bitwise_type_for_mode (mode);
      else
	{
	  mode = VOIDmode;
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op, 0)));
	}
      if (TYPE_ALIGN (type) < TYPE_ALIGN (TREE_TYPE (op)))
	type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op)));
      var = create_tmp_var (type);
      TREE_ADDRESSABLE (var) = 1;
      gimple *g;
      if (mode != VOIDmode)
	{
	  bfr = build3 (BIT_FIELD_REF, type, TREE_OPERAND (op, 0),
			TYPE_SIZE (type), TREE_OPERAND (op, 2));
	  g = gimple_build_assign (make_ssa_name (type),
				   BIT_FIELD_REF, bfr);
	  gimple_set_location (g, m_loc);
	  gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
	  bfr = gimple_assign_lhs (g);
	}
      else
	bfr = TREE_OPERAND (op, 0);
      g = gimple_build_assign (var, bfr);
      gimple_set_location (g, m_loc);
      gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
      if (mode == VOIDmode)
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op))), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build2 (MEM_REF, atype, build_fold_addr_expr (var),
			build_int_cst (build_pointer_type (type),
				       tree_to_uhwi (TREE_OPERAND (op, 2))
				       / BITS_PER_UNIT));
	}
      m_data.safe_push (var);
    }
  else
    var = unshare_expr (m_data[m_data_cnt]);
  ++m_data_cnt;
  var = limb_access (TREE_TYPE (op), var, idx, false);
  tree r = make_ssa_name (m_limb_type);
  gimple *g = gimple_build_assign (r, var);
  insert_before (g);
  return r;
}

/* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
   is an older EH edge, and except for virtual PHIs duplicate the
   PHI argument from the EH_EDGE to the new EH edge.  */

static void
add_eh_edge (basic_block src, edge eh_edge)
{
  edge e = make_edge (src, eh_edge->dest, EDGE_EH);
  e->probability = profile_probability::very_unlikely ();
  for (gphi_iterator gsi = gsi_start_phis (eh_edge->dest);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree lhs = gimple_phi_result (phi);
      if (virtual_operand_p (lhs))
	continue;
      const phi_arg_d *arg = gimple_phi_arg (phi, eh_edge->dest_idx);
      add_phi_arg (phi, arg->def, e, arg->locus);
    }
}
1840 /* Helper function for handle_stmt method, handle a load from memory. */
1843 bitint_large_huge::handle_load (gimple
*stmt
, tree idx
)
1845 tree rhs1
= gimple_assign_rhs1 (stmt
);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
      basic_block bb = gimple_bb (stmt);
      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & EDGE_EH)
  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
	 which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
	 handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
	  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
	bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	var_field_off = true;
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
			   TREE_OPERAND (rhs1, 0), repr,
			   var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
	  gimple_stmt_iterator save_gsi = m_gsi;
	  if (gsi_end_p (m_gsi))
	    m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	  tree t = limb_access (NULL_TREE, nrhs1, size_int (bo_idx), true);
	  tree iv = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv, t);
	      maybe_duplicate_eh_stmt (g, stmt);
	      edge e = split_block (gsi_bb (m_gsi), g);
	      add_eh_edge (e->src, eh_edge);
	      m_gsi = gsi_after_labels (e->dest);
	      if (gsi_bb (save_gsi) == e->src)
		  if (gsi_end_p (save_gsi))
		    save_gsi = gsi_end_bb (e->dest);
		    save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
	      if (m_preheader_bb == e->src)
		m_preheader_bb = e->dest;
	  if (gsi_end_p (m_init_gsi))
	    m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	    gsi_prev (&m_init_gsi);
	  prepare_data_in_out (iv, idx, &out);
	  out = m_data[m_data_cnt];
	  m_data.safe_push (out);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
	  m_data.safe_push (NULL_TREE);
      tree nidx0 = NULL_TREE, nidx1;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
	  gcc_assert (!m_bitfld_load);
	  m_bitfld_load = m_data_cnt;
      if (tree_fits_uhwi_p (idx))
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  gcc_assert (i * limb_prec < prec);
	  nidx1 = size_int (i + bo_idx + 1);
	  if ((i + 1) * limb_prec > prec)
	      if (prec + bo_bit <= (unsigned) limb_prec)
	  nidx0 = size_int (i + bo_idx);
	  nidx0 = make_ssa_name (sizetype);
	  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
	  nidx1 = make_ssa_name (sizetype);
	  g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
				   size_int (bo_idx + 1));
      tree iv2 = NULL_TREE;
	  tree t = limb_access (NULL_TREE, nrhs1, nidx0, true);
	  iv = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv, t);
	  bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  if ((prec % limb_prec) == 0
	      || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
	    conditional = false;
	  edge edge_true = NULL, edge_false = NULL;
	      g = gimple_build_cond (NE_EXPR, idx,
				     size_int (prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      if_then (g, profile_probability::likely (),
		       edge_true, edge_false);
	  tree t = limb_access (NULL_TREE, nrhs1, nidx1, true);
	      && !tree_fits_uhwi_p (idx))
	    iv2 = m_data[m_data_cnt + 1];
	      iv2 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (iv2, t);
		  maybe_duplicate_eh_stmt (g, stmt);
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, eh_edge);
	      tree iv3 = make_ssa_name (m_limb_type);
	      edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
	      gphi *phi = create_phi_node (iv3, edge_true->dest);
	      add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, build_zero_cst (m_limb_type),
			   edge_false, UNKNOWN_LOCATION);
	      m_gsi = gsi_after_labels (edge_true->dest);
      g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
			       iv, build_int_cst (unsigned_type_node, bo_bit));
      iv = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
				   iv2, build_int_cst (unsigned_type_node,
						       limb_prec - bo_bit));
	  g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
				   gimple_assign_lhs (g), iv);
	  iv = gimple_assign_lhs (g);
	  if (m_data[m_data_cnt])
	    m_data[m_data_cnt] = iv2;
      if (tree_fits_uhwi_p (idx))
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
	    iv = add_cast (atype, iv);
  /* Use write_p = true for loads with EH edges to make
     sure limb_access doesn't add a cast as separate
     statement after it.  */
  rhs1 = limb_access (rhs_type, rhs1, idx, eh);
  tree ret = make_ssa_name (TREE_TYPE (rhs1));
  g = gimple_build_assign (ret, rhs1);
      maybe_duplicate_eh_stmt (g, stmt);
	  edge e = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e->dest);
	  add_eh_edge (e->src, eh_edge);
      if (tree_fits_uhwi_p (idx))
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
	    ret = add_cast (atype, ret);
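/* Illustration (not from the original source; assuming a 64-bit limb and
   bo_bit == 16): the bit-field load splice above computes each result limb
   roughly as

     res = (iv >> 16) | (iv2 << 48);

   i.e. the upper 48 bits of the first representative limb become the low
   bits of the result and the low 16 bits of the next limb fill the rest,
   matching the RSHIFT_EXPR/LSHIFT_EXPR/BIT_IOR_EXPR sequence emitted
   above.  */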
/* Return a limb IDX from a mergeable statement STMT.  */

tree
bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
{
  tree lhs, rhs1, rhs2 = NULL_TREE;
  switch (gimple_code (stmt))
      if (gimple_assign_load_p (stmt))
	return handle_load (stmt, idx);
      switch (gimple_assign_rhs_code (stmt))
	  rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
	  rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  lhs = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
	  rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
	  return handle_plus_minus (gimple_assign_rhs_code (stmt),
	  rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
	  rhs1 = build_zero_cst (TREE_TYPE (rhs2));
	  return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
	  return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
				gimple_assign_rhs2 (stmt), idx);
	  return handle_operand (gimple_assign_rhs1 (stmt), idx);
	  return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt), idx);
	case VIEW_CONVERT_EXPR:
	  return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
			      TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
	  return handle_bit_field_ref (gimple_assign_rhs1 (stmt), idx);
/* Return minimum precision of OP at STMT.
   Positive value is minimum precision above which all bits
   are zero, negative means all bits above negation of the
   value are copies of the sign bit.  */

static int
range_to_prec (tree op, gimple *stmt)
{
  tree type = TREE_TYPE (op);
  unsigned int prec = TYPE_PRECISION (type);
      || !get_range_query (cfun)->range_of_expr (r, op, stmt)
      || r.undefined_p ())
      if (TYPE_UNSIGNED (type))
	return MIN ((int) -prec, -2);
  if (!TYPE_UNSIGNED (TREE_TYPE (op)))
      w = r.lower_bound ();
      int min_prec1 = wi::min_precision (w, SIGNED);
      w = r.upper_bound ();
      int min_prec2 = wi::min_precision (w, SIGNED);
      int min_prec = MAX (min_prec1, min_prec2);
      return MIN (-min_prec, -2);
  w = r.upper_bound ();
  int min_prec = wi::min_precision (w, UNSIGNED);
  return MAX (min_prec, 1);
}
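/* For instance, under the convention above an operand whose range is known
   to be [0, 0x3ff] yields 10 (all bits above bit 10 are zero), while
   [-0x200, 0x1ff] yields -10 (all bits above bit 9 are sign bit copies);
   these are exactly the encodings the PREC0/PREC1 table in
   lower_addsub_overflow below relies on.  */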
/* Return address of the first limb of OP and write into *PREC
   its precision.  If positive, the operand is zero extended
   from that precision, if it is negative, the operand is sign-extended
   from -*PREC.  If PREC_STORED is NULL, it is the toplevel call,
   otherwise *PREC_STORED is prec from the innermost call without
   range optimizations.  */

tree
bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
					int *prec_stored, int *prec)
{
  location_t loc_save = m_loc;
  if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
       || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
      && TREE_CODE (op) != INTEGER_CST)
      *prec = range_to_prec (op, stmt);
      bitint_prec_kind kind = bitint_prec_small;
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
      if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
	kind = bitint_precision_kind (TREE_TYPE (op));
      if (kind == bitint_prec_middle)
	  tree type = NULL_TREE;
	  op = maybe_cast_middle_bitint (&m_gsi, op, type);
      tree op_type = TREE_TYPE (op);
      unsigned HOST_WIDE_INT nelts
	= CEIL (TYPE_PRECISION (op_type), limb_prec);
      /* Add support for 3 or more limbs filled in from normal
	 integral type if this assert fails.  If no target chooses
	 limb mode smaller than half of largest supported normal
	 integral type, this will not be needed.  */
      gcc_assert (nelts <= 2);
	*prec_stored = (TYPE_UNSIGNED (op_type)
			? TYPE_PRECISION (op_type)
			: -TYPE_PRECISION (op_type));
      if (*prec <= limb_prec && *prec >= -limb_prec)
	  if (TYPE_UNSIGNED (op_type))
	      if (*prec_stored > limb_prec)
		*prec_stored = limb_prec;
	  else if (*prec_stored < -limb_prec)
	    *prec_stored = -limb_prec;
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      tree var = create_tmp_var (atype);
      if (!useless_type_conversion_p (m_limb_type, op_type))
	t1 = add_cast (m_limb_type, t1);
      tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
		       NULL_TREE, NULL_TREE);
      gimple *g = gimple_build_assign (v, t1);
	  tree lp = build_int_cst (unsigned_type_node, limb_prec);
	  g = gimple_build_assign (make_ssa_name (op_type),
				   RSHIFT_EXPR, op, lp);
	  tree t2 = gimple_assign_lhs (g);
	  t2 = add_cast (m_limb_type, t2);
	  v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (v, t2);
      tree ret = build_fold_addr_expr (var);
      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
	  tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
	  g = gimple_build_assign (var, clobber);
	  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
  switch (TREE_CODE (op))
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	  gimple *g = SSA_NAME_DEF_STMT (op);
	  m_loc = gimple_location (g);
	  if (gimple_assign_load_p (g))
	      *prec = range_to_prec (op, NULL);
		*prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
				? TYPE_PRECISION (TREE_TYPE (op))
				: -TYPE_PRECISION (TREE_TYPE (op)));
	      ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
	      ret = force_gimple_operand_gsi (&m_gsi, ret, true,
					      NULL_TREE, true, GSI_SAME_STMT);
	  else if (gimple_code (g) == GIMPLE_NOP)
	      *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
		*prec_stored = *prec;
	      tree var = create_tmp_var (m_limb_type);
	      TREE_ADDRESSABLE (var) = 1;
	      ret = build_fold_addr_expr (var);
	      if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
		  tree clobber = build_clobber (m_limb_type,
						CLOBBER_STORAGE_END);
		  g = gimple_build_assign (var, clobber);
		  gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
	      gcc_assert (gimple_assign_cast_p (g));
	      tree rhs1 = gimple_assign_rhs1 (g);
	      bitint_prec_kind kind = bitint_prec_small;
	      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
		rhs1 = TREE_OPERAND (rhs1, 0);
	      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
	      if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
		kind = bitint_precision_kind (TREE_TYPE (rhs1));
	      if (kind >= bitint_prec_large)
		  tree lhs_type = TREE_TYPE (op);
		  tree rhs_type = TREE_TYPE (rhs1);
		  int prec_stored_val = 0;
		  ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
		  if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
		      if (TYPE_UNSIGNED (lhs_type)
			  && !TYPE_UNSIGNED (rhs_type))
			gcc_assert (*prec >= 0 || prec_stored == NULL);
		      if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
		      else if (TYPE_UNSIGNED (lhs_type))
			  gcc_assert (*prec > 0
				      || prec_stored_val > 0
				      || (-prec_stored_val
					  >= TYPE_PRECISION (lhs_type)));
			  *prec = TYPE_PRECISION (lhs_type);
		      else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
			*prec = -TYPE_PRECISION (lhs_type);
	int p = var_to_partition (m_map, op);
	gcc_assert (m_vars[p] != NULL_TREE);
	*prec = range_to_prec (op, stmt);
	  *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
			  ? TYPE_PRECISION (TREE_TYPE (op))
			  : -TYPE_PRECISION (TREE_TYPE (op)));
	return build_fold_addr_expr (m_vars[p]);
	unsigned int min_prec, mp;
	w = wi::to_wide (op);
	if (tree_int_cst_sgn (op) >= 0)
	    min_prec = wi::min_precision (w, UNSIGNED);
	    *prec = MAX (min_prec, 1);
	    min_prec = wi::min_precision (w, SIGNED);
	    *prec = MIN ((int) -min_prec, -2);
	mp = CEIL (min_prec, limb_prec) * limb_prec;
	if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
	    && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
		|| TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
	  type = TREE_TYPE (op);
	  type = build_bitint_type (mp, 1);
	if (TREE_CODE (type) != BITINT_TYPE
	    || bitint_precision_kind (type) == bitint_prec_small)
	    if (TYPE_PRECISION (type) <= limb_prec)
		while (bitint_precision_kind (mp) == bitint_prec_small)
		/* This case is for targets which e.g. have 64-bit
		   limb but categorize up to 128-bits _BitInts as
		   small.  We could use type of m_limb_type[2] and
		   similar instead to save space.  */
		type = build_bitint_type (mp, 1);
	  if (tree_int_cst_sgn (op) >= 0)
	    *prec_stored = MAX (TYPE_PRECISION (type), 1);
	    *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
	op = tree_output_constant_def (fold_convert (type, op));
	return build_fold_addr_expr (op);
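/* As an illustration of the contract documented above: for an INTEGER_CST
   operand the INTEGER_CST arm returns the address of a constant-pool copy
   padded to a whole number of limbs, with *PREC derived from
   wi::min_precision as in the code above, e.g. 3 for the constant 5
   (zero-extended above bit 3) or -4 for -5 (sign-extended from bit 4).  */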
/* Helper function, create a loop before the current location,
   start with sizetype INIT value from the preheader edge.  Return
   a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
   from the latch edge.  */

tree
bitint_large_huge::create_loop (tree init, tree *idx_next)
{
  if (!gsi_end_p (m_gsi))
    m_gsi = gsi_last_bb (gsi_bb (m_gsi));
  edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
  e3->probability = profile_probability::very_unlikely ();
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = e3->probability.invert ();
  tree idx = make_ssa_name (sizetype);
  gphi *phi = create_phi_node (idx, e1->dest);
  add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
  *idx_next = make_ssa_name (sizetype);
  add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
  m_gsi = gsi_after_labels (e1->dest);
  m_preheader_bb = e1->src;
  class loop *loop = alloc_loop ();
  loop->header = e1->dest;
  add_loop (loop, e1->src->loop_father);
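/* Rough shape of the CFG built above:

     preheader (e1->src)
       |
       v e1  (idx = PHI <init (e1), *idx_next (e3)>)
     header == latch (e1->dest) --e3 (EDGE_TRUE_VALUE)--> header
       |
       v e2 (EDGE_FALSE_VALUE)
     exit (e2->dest)

   i.e. a single-block do-while loop; callers emit the loop body at
   gsi_after_labels (e1->dest) and later supply the latch condition
   feeding *IDX_NEXT.  */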
/* Lower large/huge _BitInt statement mergeable or similar STMT which can be
   lowered using iteration from the least significant limb up to the most
   significant limb.  For large _BitInt it is emitted as straight line code
   before current location, for huge _BitInt as a loop handling two limbs
   at once, followed by handling up to limbs in straight line code (at most
   one full and one partial limb).  It can also handle EQ_EXPR/NE_EXPR
   comparisons, in that case CMP_CODE should be the comparison code and
   CMP_OP1/CMP_OP2 the comparison operands.  */

tree
bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
					 tree cmp_op1, tree cmp_op2)
{
  bool eq_p = cmp_code != ERROR_MARK;
    type = TREE_TYPE (cmp_op1);
    type = TREE_TYPE (gimple_assign_lhs (stmt));
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  tree lhs = gimple_get_lhs (stmt);
  tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
      int p = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[p] != NULL_TREE);
      m_lhs = lhs = m_vars[p];
  unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  tree ext = NULL_TREE, store_operand = NULL_TREE;
  basic_block eh_pad = NULL;
  tree nlhs = NULL_TREE;
  unsigned HOST_WIDE_INT bo_idx = 0;
  unsigned HOST_WIDE_INT bo_bit = 0;
  tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
  if (gimple_store_p (stmt))
      store_operand = gimple_assign_rhs1 (stmt);
      eh = stmt_ends_bb_p (stmt);
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
      if (TREE_CODE (lhs) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	  tree fld = TREE_OPERAND (lhs, 1);
	  gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
	  poly_int64 bitoffset;
	  poly_uint64 field_offset, repr_offset;
	  if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	      bool var_field_off = false;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
		var_field_off = true;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
			     TREE_OPERAND (lhs, 0), repr,
			     ? TREE_OPERAND (lhs, 2) : NULL_TREE);
	      HOST_WIDE_INT bo = bitoffset.to_constant ();
	      bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
	      bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      && TREE_CODE (store_operand) == SSA_NAME
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
	 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
      || gimple_assign_cast_p (stmt))
      rhs1 = gimple_assign_rhs1 (store_operand
				 ? SSA_NAME_DEF_STMT (store_operand)
      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
	rhs1 = TREE_OPERAND (rhs1, 0);
      /* Optimize mergeable ops ending with widening cast to _BitInt
	 (or followed by store).  We can lower just the limbs of the
	 cast operand and widen afterwards.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
	  && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	  && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
		    limb_prec) < CEIL (prec, limb_prec)
	      || (kind == bitint_prec_huge
		  && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
	  store_operand = rhs1;
	  prec = TYPE_PRECISION (TREE_TYPE (rhs1));
	  kind = bitint_precision_kind (TREE_TYPE (rhs1));
	  if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL (prec, limb_prec);
      rem = (prec % (2 * limb_prec));
      end = (prec - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec);
      idx = idx_first = create_loop (size_zero_node, &idx_next);
  basic_block edge_bb = NULL;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      if (kind == bitint_prec_large)
	m_gsi = gsi_end_bb (edge_bb);
    m_after_stmt = stmt;
  if (kind != bitint_prec_large)
    m_upwards_2limb = end;
    = (prec != (unsigned) TYPE_PRECISION (type)
       && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
	   > CEIL (prec, limb_prec)));
  for (unsigned i = 0; i < cnt; i++)
      if (kind == bitint_prec_large)
	idx = size_int (end + (i > 2));
	  rhs1 = handle_operand (cmp_op1, idx);
	  tree rhs2 = handle_operand (cmp_op2, idx);
	  g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
	  edge e1 = split_block (gsi_bb (m_gsi), g);
	  e1->flags = EDGE_FALSE_VALUE;
	  edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	  e1->probability = profile_probability::unlikely ();
	  e2->probability = e1->probability.invert ();
	  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	  m_gsi = gsi_after_labels (e1->dest);
	    rhs1 = handle_operand (store_operand, idx);
	    rhs1 = handle_stmt (stmt, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (sext && i == cnt - 1)
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
	  basic_block new_bb = NULL;
	  /* Handle stores into bit-fields.  */
	      if (kind != bitint_prec_large)
		  prepare_data_in_out (build_zero_cst (m_limb_type),
		  bf_next = m_data.pop ();
		  bf_cur = m_data.pop ();
		  g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
					 NULL_TREE, NULL_TREE);
		  if_then_else (g, profile_probability::unlikely (),
		    = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
		  tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
						  bo_idx * limb_prec + bo_bit);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		      maybe_duplicate_eh_stmt (g, stmt);
			  edge e = split_block (gsi_bb (m_gsi), g);
			  m_gsi = gsi_after_labels (e->dest);
			  add_eh_edge (e->src,
				       find_edge (gimple_bb (stmt), eh_pad));
		  if (kind == bitint_prec_large)
		    m_gsi = gsi_after_labels (e2->src);
	      tree t1 = make_ssa_name (m_limb_type);
	      tree t2 = make_ssa_name (m_limb_type);
	      tree t3 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
				       build_int_cst (unsigned_type_node,
						      limb_prec - bo_bit));
	      g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
				       build_int_cst (unsigned_type_node,
	      g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
	      if (bf_next && i == 1)
		  g = gimple_build_assign (bf_next, bf_cur);
	  /* Handle bit-field access to partial last limb if needed.  */
	      && tree_fits_uhwi_p (idx))
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = (tprec - 1) % limb_prec + 1;
	      if (rprec + bo_bit < (unsigned) limb_prec)
		    = build_nonstandard_integer_type (rprec + bo_bit, 1);
		    = build_bit_field_ref (ftype, unshare_expr (nlhs),
					   (bo_idx + tprec / limb_prec)
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
	      else if (rprec + bo_bit == (unsigned) limb_prec)
	  /* Otherwise, stores to any other lhs.  */
	      tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
				    nlhs ? nlhs : lhs, nidx, true);
	      g = gimple_build_assign (l, rhs1);
	      maybe_duplicate_eh_stmt (g, stmt);
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src,
			       find_edge (gimple_bb (stmt), eh_pad));
	    m_gsi = gsi_after_labels (new_bb);
      if (kind == bitint_prec_huge && i <= 1)
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      m_gsi = gsi_after_labels (edge_bb);
	    m_gsi = gsi_for_stmt (stmt);
	  ext = add_cast (signed_type_for (m_limb_type), ext);
	  tree lpm1 = build_int_cst (unsigned_type_node,
	  tree n = make_ssa_name (TREE_TYPE (ext));
	  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	  ext = add_cast (m_limb_type, n);
	ext = build_zero_cst (m_limb_type);
      kind = bitint_precision_kind (type);
      unsigned start = CEIL (prec, limb_prec);
      prec = TYPE_PRECISION (type);
      idx = idx_first = idx_next = NULL_TREE;
      if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
	kind = bitint_prec_large;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec) - start;
	  rem = prec % limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = (bo_bit != 0) + 1 + (rem != 0);
      for (unsigned i = 0; i < cnt; i++)
	  if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
	    idx = size_int (start + i);
	  else if (i == cnt - 1 && (rem != 0))
	    idx = size_int (end);
	  else if (i == (bo_bit != 0))
	    idx = create_loop (size_int (start + i), &idx_next);
	  if (bf_cur != NULL_TREE && bf_cur != ext)
	      tree t1 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
				       build_int_cst (unsigned_type_node,
						      limb_prec - bo_bit));
	      if (integer_zerop (ext))
		  tree t2 = make_ssa_name (m_limb_type);
		  rhs1 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
					   build_int_cst (unsigned_type_node,
		  g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + bo_idx);
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
	  /* Handle bit-field access to partial last limb if needed.  */
	  if (nlhs && i == cnt - 1)
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = (tprec - 1) % limb_prec + 1;
	      if (rprec + bo_bit < (unsigned) limb_prec)
		    = build_nonstandard_integer_type (rprec + bo_bit, 1);
		    = build_bit_field_ref (ftype, unshare_expr (nlhs),
					   (bo_idx + tprec / limb_prec)
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
	      else if (rprec + bo_bit == (unsigned) limb_prec)
	  /* Otherwise, stores to any other lhs.  */
	      tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
				    nlhs ? nlhs : lhs, nidx, true);
	      g = gimple_build_assign (l, rhs1);
	      maybe_duplicate_eh_stmt (g, stmt);
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
	  if (kind == bitint_prec_huge && i == (bo_bit != 0))
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				     NULL_TREE, NULL_TREE);
	      m_gsi = gsi_for_stmt (stmt);
  if (bf_cur != NULL_TREE)
      unsigned int tprec = TYPE_PRECISION (type);
      unsigned int rprec = (tprec + bo_bit) % limb_prec;
      tree ftype = build_nonstandard_integer_type (rprec, 1);
      tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
				      (bo_idx + (tprec + bo_bit) / limb_prec)
	  rhs1 = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
				   build_int_cst (unsigned_type_node,
						  limb_prec - bo_bit));
	  rhs1 = add_cast (ftype, rhs1);
      g = gimple_build_assign (bfr, rhs1);
	  maybe_duplicate_eh_stmt (g, stmt);
	      edge e = split_block (gsi_bb (m_gsi), g);
	      m_gsi = gsi_after_labels (e->dest);
	      add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
  if (gimple_store_p (stmt))
      unlink_stmt_vdef (stmt);
      release_ssa_name (gimple_vdef (stmt));
      gsi_remove (&m_gsi, true);
      lhs = make_ssa_name (boolean_type_node);
      basic_block bb = gimple_bb (stmt);
      gphi *phi = create_phi_node (lhs, bb);
      edge e = find_edge (gsi_bb (m_gsi), bb);
      unsigned int n = EDGE_COUNT (bb->preds);
      for (unsigned int i = 0; i < n; i++)
	  edge e2 = EDGE_PRED (bb, i);
	  add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
		       e2, UNKNOWN_LOCATION);
      cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
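/* Worked example of the huge-_BitInt counts above (assuming a 64-bit
   limb): for prec == 255, rem = 255 % 128 == 127 and end = (255 - 127)
   / 64 == 2, so the loop handles limbs 0 and 1 and cnt = 2 +
   CEIL (127, 64) == 4; i == 0 and 1 emit the two limb handlers inside
   the loop body, i == 2 and 3 the remaining full and partial limb in
   straight line code after it.  */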
/* Handle a large/huge _BitInt comparison statement STMT other than
   EQ_EXPR/NE_EXPR.  CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
   lower_mergeable_stmt.  The {GT,GE,LT,LE}_EXPR comparisons are
   lowered by iteration from the most significant limb downwards to
   the least significant one, for large _BitInt in straight line code,
   otherwise with most significant limb handled in
   straight line code followed by a loop handling one limb at a time.
   Comparisons with unsigned huge _BitInt with precisions which are
   multiples of limb precision can use just the loop and don't need to
   handle most significant limb before the loop.  The loop or straight
   line code jumps to final basic block if a particular pair of limbs
   is different.  */

tree
bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
					  tree cmp_op1, tree cmp_op2)
{
  tree type = TREE_TYPE (cmp_op1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  if (!TYPE_UNSIGNED (type)
      && integer_zerop (cmp_op2)
      && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
      unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
      tree idx = size_int (end);
      tree rhs1 = handle_operand (cmp_op1, idx);
      if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
      tree lhs = make_ssa_name (boolean_type_node);
      g = gimple_build_assign (lhs, cmp_code, rhs1,
			       build_zero_cst (TREE_TYPE (rhs1)));
  unsigned cnt, rem = 0, end = 0;
  tree idx = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
      rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
      if (rem == 0 && !TYPE_UNSIGNED (type))
      end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
      cnt = 1 + (rem != 0);
  basic_block edge_bb = NULL;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
  m_gsi = gsi_end_bb (edge_bb);
  edge *edges = XALLOCAVEC (edge, cnt * 2);
  for (unsigned i = 0; i < cnt; i++)
      if (kind == bitint_prec_large)
	idx = size_int (cnt - i - 1);
      else if (i == cnt - 1)
	idx = create_loop (size_int (end - 1), &idx_next);
	idx = size_int (end);
      tree rhs1 = handle_operand (cmp_op1, idx);
      tree rhs2 = handle_operand (cmp_op2, idx);
	  && !TYPE_UNSIGNED (type)
	  && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	  rhs2 = add_cast (stype, rhs2);
      g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      edge e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::likely ();
      e2->probability = e1->probability.invert ();
      set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
      m_gsi = gsi_after_labels (e1->dest);
      g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::unlikely ();
      e2->probability = e1->probability.invert ();
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i + 1] = e2;
      if (kind == bitint_prec_huge && i == cnt - 1)
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gsi_bb (m_gsi),
					       &true_edge, &false_edge);
	  m_gsi = gsi_after_labels (false_edge->dest);
  tree lhs = make_ssa_name (boolean_type_node);
  basic_block bb = gimple_bb (stmt);
  gphi *phi = create_phi_node (lhs, bb);
  for (unsigned int i = 0; i < cnt * 2; i++)
      tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
		  ^ (i & 1)) ? boolean_true_node : boolean_false_node;
      add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
  add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
		    ? boolean_true_node : boolean_false_node,
	       find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
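/* Example (64-bit limb): for a signed _BitInt(263) comparison, rem =
   263 % 64 == 7, end == 4 and cnt == 2, so the partial most significant
   limb (idx 4) is compared in straight line code first and the loop then
   walks the remaining limbs downwards from idx 3, jumping out to the PHI
   through one of the recorded edges as soon as a limb pair differs.  */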
/* Lower large/huge _BitInt left and right shift except for left
   shift by < limb_prec constant.  */

void
bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gimple *final_stmt = gsi_stmt (m_gsi);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type);
  tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
  if (obj == NULL_TREE)
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
  /* Preparation code common for both left and right shifts.
     unsigned n1 = n % limb_prec;
     size_t n2 = n / limb_prec;
     size_t n3 = n1 != 0;
     unsigned n4 = (limb_prec - n1) % limb_prec;
     (for power of 2 limb_prec n4 can be -n1 & (limb_prec - 1)).  */
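  /* E.g. with limb_prec == 64 and n == 100 this computes n1 == 36,
     n2 == 1, n3 == 1 and n4 == 28: a shift by one whole limb plus 36
     bits, with 28 == 64 - 36 the complementary shift used below to
     splice neighbouring source limbs together.  */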
  if (TREE_CODE (n) == INTEGER_CST)
      tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
      n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
      n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
      n3 = size_int (!integer_zerop (n1));
      n4 = int_const_binop (TRUNC_MOD_EXPR,
			    int_const_binop (MINUS_EXPR, lp, n1), lp);
      n1 = make_ssa_name (TREE_TYPE (n));
      n2 = make_ssa_name (sizetype);
      n3 = make_ssa_name (sizetype);
      n4 = make_ssa_name (TREE_TYPE (n));
      if (pow2p_hwi (limb_prec))
	  tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
	  g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   build_int_cst (TREE_TYPE (n),
						  exact_log2 (limb_prec)));
	  if (gimple_assign_lhs (g) != n2)
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
	  g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
	  tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
	  g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   TRUNC_DIV_EXPR, n, lp);
	  if (gimple_assign_lhs (g) != n2)
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   MINUS_EXPR, lp, n1);
	  g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
      g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
			       build_zero_cst (TREE_TYPE (n)));
      g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
  tree p = build_int_cst (sizetype,
			  prec / limb_prec - (prec % limb_prec == 0));
  if (rhs_code == RSHIFT_EXPR)
    {
      /* Lower
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   int signed_p = (typeof (src) -1) < 0;
	   for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
				 ? p : p - n3); ++idx)
	     dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
	   if (prec % limb_prec == 0)
	     ext = ((signed limb_type) (src[p] << (limb_prec
						   - (prec % limb_prec))))
		   >> (limb_prec - (prec % limb_prec));
	     ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
	   if (!signed_p && (prec % limb_prec == 0))
	   else if (idx < prec / 64)
	       dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
	       dst[idx] = ((signed limb_type) ext) >> n1;
	       ext = ((signed limb_type) ext) >> (limb_prec - 1);
	       dst[idx] = ext >> n1;
	   for (++idx; idx <= p; ++idx)
	 */
      if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
      else if (TREE_CODE (n3) == INTEGER_CST)
	pmn3 = int_const_binop (MINUS_EXPR, p, n3);
	  pmn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
      g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx = create_loop (n2, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxpn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
      tree t1 = handle_operand (rhs1, idx);
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       RSHIFT_EXPR, t1, n1);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	  tree t2 = handle_operand (rhs1, idxpn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t2, n4);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  t1 = gimple_assign_lhs (g);
      tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
      g = gimple_build_assign (l, t1);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      tree ms = handle_operand (rhs1, p);
      if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
	ext = add_cast (m_limb_type, ms);
      if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	  && !integer_zerop (n3))
	  g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  t1 = handle_operand (rhs1, idx);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t1, n1);
	  t1 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, ext, n4);
	  tree t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  t1 = gimple_assign_lhs (g);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
	  g = gimple_build_assign (l, t1);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
      idx = gimple_assign_lhs (g);
      if (!TYPE_UNSIGNED (type))
	sext = add_cast (signed_type_for (m_limb_type), ext);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
			       RSHIFT_EXPR, sext, n1);
      t1 = gimple_assign_lhs (g);
      if (!TYPE_UNSIGNED (type))
	  t1 = add_cast (m_limb_type, t1);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
				   build_int_cst (TREE_TYPE (n),
	  ext = add_cast (m_limb_type, gimple_assign_lhs (g));
	ext = build_zero_cst (m_limb_type);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
      idx = gimple_assign_lhs (g);
      g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, ext);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
      g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
    }
  else
    {
      /* Lower
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
	     dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
	     dst[idx] = src[idx - n2] << n1;
	   for (; (ssize_t) idx >= 0; --idx)
	 */
      if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
	n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
	  n2pn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
      /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
	 idx even to access the most significant partial limb.  */
      if (integer_zerop (n3))
	/* For n3 == 0 p >= n2 + n3 is always true for all valid shift
	   counts.  Emit if (true) condition that can be optimized later.  */
	g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
			       NULL_TREE, NULL_TREE);
	g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx = create_loop (p, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxmn2mn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
      g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
      tree t1 = handle_operand (rhs1, idxmn2);
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, t1, n1);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	  tree t2 = handle_operand (rhs1, idxmn2mn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t2, n4);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  t1 = gimple_assign_lhs (g);
      tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      tree sn2pn3 = add_cast (ssizetype, n2pn3);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
			     NULL_TREE, NULL_TREE);
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      if (!integer_zerop (n3))
	  g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
	  t1 = handle_operand (rhs1, idxmn2);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t1, n1);
	  t1 = gimple_assign_lhs (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idx, true);
	  g = gimple_build_assign (l, t1);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
			     ssize_int (0), NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, build_zero_cst (m_limb_type));
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
      g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
			     ssize_int (0), NULL_TREE, NULL_TREE);
    }
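/* Continuing the n == 100 example for the left-shift pseudo code above,
   with prec == 256 and a 64-bit limb (so p == 3, n1 == 36, n2 == 1,
   n3 == 1, n4 == 28) the emitted sequence corresponds to

     dst[3] = (src[2] << 36) | (src[1] >> 28);
     dst[2] = (src[1] << 36) | (src[0] >> 28);
     dst[1] = src[0] << 36;

   with the final downward loop zeroing dst[0].  */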
/* Lower large/huge _BitInt multiplication or division.  */

void
bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type), prec1, prec2;
  rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
  rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
  if (obj == NULL_TREE)
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      lhs = build_fold_addr_expr (obj);
      lhs = build_fold_addr_expr (obj);
      lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
				      NULL_TREE, true, GSI_SAME_STMT);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
      g = gimple_build_call_internal (IFN_MULBITINT, 6,
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
    case TRUNC_DIV_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
				      lhs, build_int_cst (sitype, prec),
				      build_int_cst (sitype, 0),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
    case TRUNC_MOD_EXPR:
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
				      build_int_cst (sitype, 0),
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
  if (stmt_ends_bb_p (stmt))
      maybe_duplicate_eh_stmt (g, stmt);
      basic_block bb = gimple_bb (stmt);
      FOR_EACH_EDGE (e1, ei, bb->succs)
	if (e1->flags & EDGE_EH)
      edge e2 = split_block (gsi_bb (m_gsi), g);
      m_gsi = gsi_after_labels (e2->dest);
      add_eh_edge (e2->src, e1);
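/* So e.g. a TRUNC_MOD_EXPR on _BitInt(512) operands becomes, in rough
   pseudo-GIMPLE,

     .DIVMODBITINT (0B, 0, &rem, 512, &op1, prec1, &op2, prec2);

   with a null quotient pointer, the remainder written through the lhs
   address, and prec1/prec2 the (possibly range-narrowed) operand
   precisions returned by handle_operand_addr above.  */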
/* Lower large/huge _BitInt conversion to/from floating point.  */

void
bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  if (rhs_code == FIX_TRUNC_EXPR)
      int prec = TYPE_PRECISION (TREE_TYPE (lhs));
      if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
      if (obj == NULL_TREE)
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  lhs = build_fold_addr_expr (obj);
	  lhs = build_fold_addr_expr (obj);
	  lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
					  NULL_TREE, true, GSI_SAME_STMT);
      scalar_mode from_mode
	= as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
      /* IEEE single is a full superset of both IEEE half and
	 bfloat formats, convert to float first and then to _BitInt
	 to avoid the need of another 2 library routines.  */
      if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
	   || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
	  && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
	  tree type = lang_hooks.types.type_for_mode (SFmode, 0);
	  rhs1 = add_cast (type, rhs1);
      g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
				      lhs, build_int_cst (sitype, prec),
      rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
      g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
				      rhs1, build_int_cst (sitype, prec));
      gimple_call_set_lhs (g, lhs);
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      gsi_replace (&m_gsi, g, true);
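/* I.e. _BitInt(256) -> double lowers to roughly
     lhs = .BITINTTOFLOAT (&op, prec);
   and float -> _BitInt(256) to
     .FLOATTOBITINT (&obj, prec, op);
   with prec adjusted for signedness by the branch above, and _Float16 or
   bfloat sources first widened to float as the comment above explains.  */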
/* Helper method for lower_addsub_overflow and lower_mul_overflow.
   If check_zero is true, caller wants to check if all bits in [start, end)
   are zero, otherwise if bits in [start, end) are either all zero or
   all ones.  L is the limb with index LIMB, START and END are measured
   from the least significant bit of the number.  */

tree
bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
						unsigned int end, tree l,
  unsigned startlimb = start / limb_prec;
  unsigned endlimb = (end - 1) / limb_prec;
  if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
  if (startlimb == endlimb && limb == startlimb)
      wide_int w = wi::shifted_mask (start % limb_prec,
				     end - start, false, limb_prec);
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       wide_int_to_tree (m_limb_type, w));
      return gimple_assign_lhs (g);
      unsigned int shift = start % limb_prec;
      if ((end % limb_prec) != 0)
	  unsigned int lshift = (-end) % limb_prec;
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   build_int_cst (unsigned_type_node,
	  l = gimple_assign_lhs (g);
	l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       build_int_cst (unsigned_type_node, shift));
      return add_cast (m_limb_type, gimple_assign_lhs (g));
  else if (limb == startlimb)
      if ((start % limb_prec) == 0)
	l = add_cast (signed_type_for (m_limb_type), l);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       build_int_cst (unsigned_type_node,
					      start % limb_prec));
      l = gimple_assign_lhs (g);
	l = add_cast (m_limb_type, l);
  else if (limb == endlimb)
      if ((end % limb_prec) == 0)
      wide_int w = wi::mask (end % limb_prec, false, limb_prec);
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       wide_int_to_tree (m_limb_type, w));
      return gimple_assign_lhs (g);
      unsigned int shift = (-end) % limb_prec;
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       build_int_cst (unsigned_type_node, shift));
      l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
			       build_int_cst (unsigned_type_node, shift));
      return add_cast (m_limb_type, gimple_assign_lhs (g));
/* Helper method for lower_addsub_overflow and lower_mul_overflow.  Store
   result including overflow flag into the right locations.  */

void
bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
					  tree ovf, tree lhs, tree orig_obj,
					  gimple *stmt, tree_code code)
{
  if (obj == NULL_TREE
      && (TREE_CODE (type) != BITINT_TYPE
	  || bitint_precision_kind (type) < bitint_prec_large))
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
      tree lhs_type = type;
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) == bitint_prec_middle)
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
						   TYPE_UNSIGNED (type));
      tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
      g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
      r1 = gimple_assign_lhs (g);
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	  tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
	  r2 = gimple_assign_lhs (g);
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  r1 = gimple_assign_lhs (g);
      if (lhs_type != type)
	r1 = add_cast (type, r1);
      ovf = add_cast (lhs_type, ovf);
      if (lhs_type != type)
	ovf = add_cast (type, ovf);
      g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
      m_gsi = gsi_for_stmt (stmt);
      gsi_replace (&m_gsi, g, true);
  unsigned HOST_WIDE_INT nelts = 0;
  tree atype = NULL_TREE;
      nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
      if (orig_obj == NULL_TREE)
      atype = build_array_type_nelts (m_limb_type, nelts);
      if (orig_obj == NULL_TREE)
	  zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
	  v1 = build2 (MEM_REF, atype,
		       build_fold_addr_expr (unshare_expr (obj)), zero);
      else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
	v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
	v1 = unshare_expr (obj);
      zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
      v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
      g = gimple_build_assign (v1, v2);
  if (orig_obj == NULL_TREE && obj)
      ovf = add_cast (m_limb_type, ovf);
      tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
      g = gimple_build_assign (l, ovf);
	  atype = build_array_type_nelts (m_limb_type, nelts - 1);
	  tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
				    (nelts + 1) * m_limb_size);
	  tree v1 = build2 (MEM_REF, atype,
			    build_fold_addr_expr (unshare_expr (obj)),
	  g = gimple_build_assign (v1, build_zero_cst (atype));
  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
      imm_use_iterator ui;
      use_operand_p use_p;
      FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
	  g = USE_STMT (use_p);
	  if (!is_gimple_assign (g)
	      || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
	  tree lhs2 = gimple_assign_lhs (g);
	  single_imm_use (lhs2, &use_p, &use_stmt);
	  lhs2 = gimple_assign_lhs (use_stmt);
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
	    g = gimple_build_assign (lhs2, ovf);
	    g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
	  gsi_replace (&gsi, g, true);
	  if (gsi_stmt (m_gsi) == use_stmt)
	    m_gsi = gsi_for_stmt (g);
  else if (ovf != boolean_false_node)
      g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
			     NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::very_unlikely (),
	       edge_true, edge_false);
      tree zero = build_zero_cst (TREE_TYPE (lhs));
      tree fn = ubsan_build_overflow_builtin (code, m_loc,
      force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
				true, GSI_SAME_STMT);
      m_gsi = gsi_after_labels (edge_true->dest);
      tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
      g = gimple_build_assign (var, clobber);
      gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
/* Helper function for lower_addsub_overflow and lower_mul_overflow.
   Given precisions of result TYPE (PREC), argument 0 precision PREC0,
   argument 1 precision PREC1 and minimum precision for the result
   PREC2, compute *START, *END, *CHECK_ZERO and return OVF.  */

static tree
arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
		int prec2, unsigned *start, unsigned *end, bool *check_zero)
{
  /* Ignore this special rule for subtraction, even if both
     prec0 >= 0 and prec1 >= 0, their subtraction can be negative
     in infinite precision.  */
  if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
      /* Result in [0, prec2) is unsigned, if prec > prec2,
	 all bits above it will be zero.  */
      if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
	return boolean_false_node;
      /* ovf if any of bits in [start, end) is non-zero.  */
      *start = prec - !TYPE_UNSIGNED (type);
  else if (TYPE_UNSIGNED (type))
      /* If result in [0, prec2) is signed and if prec > prec2,
	 all bits above it will be sign bit copies.  */
	  /* ovf if bit prec - 1 is non-zero.  */
	  /* ovf if any of bits in [start, end) is non-zero.  */
  else if (prec >= prec2)
    return boolean_false_node;
      /* ovf if [start, end) bits aren't all zeros or all ones.  */
      *check_zero = false;
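/* Example of the convention: adding two operands known to fit in 8
   unsigned bits (prec0 == prec1 == 8, hence prec2 == 9 per the table
   below) into an unsigned result with prec == 16 satisfies
   prec - 0 >= prec2, so boolean_false_node is returned and no overflow
   check is emitted at all; with prec == 8 the caller instead tests the
   bits in [prec, prec2), here [8, 9).  */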
/* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */

void
bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type);
  int prec0 = range_to_prec (arg0, stmt);
  int prec1 = range_to_prec (arg1, stmt);
  /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
     the minimum unsigned precision of any possible operation's
     result, otherwise it is the minimum signed precision.

     If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
     if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
     if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
     if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
     PREC0  CODE  PREC1  RESULT	          PREC2  SIGNED vs. UNSIGNED
	8     +	    8	 [0, 0x1fe]	    9	 UNSIGNED
	8     +	   10	 [0, 0x4fe]	   11	 UNSIGNED
       -8     +	   -8	 [-0x100, 0xfe]	    9	 SIGNED
       -8     +	  -10	 [-0x280, 0x27e]   11	 SIGNED
	8     +	   -8	 [-0x80, 0x17e]	   10	 SIGNED
	8     +	  -10	 [-0x200, 0x2fe]   11	 SIGNED
       10     +	   -8	 [-0x80, 0x47e]	   12	 SIGNED
	8     -	    8	 [-0xff, 0xff]	    9	 SIGNED
	8     -	   10	 [-0x3ff, 0xff]	   11	 SIGNED
       10     -	    8	 [-0xff, 0x3ff]	   11	 SIGNED
       -8     -	   -8	 [-0xff, 0xff]	    9	 SIGNED
       -8     -	  -10	 [-0x27f, 0x27f]   11	 SIGNED
      -10     -	   -8	 [-0x27f, 0x27f]   11	 SIGNED
	8     -	   -8	 [-0x7f, 0x17f]	   10	 SIGNED
	8     -	  -10	 [-0x1ff, 0x2ff]   11	 SIGNED
       10     -	   -8	 [-0x7f, 0x47f]	   12	 SIGNED
       -8     -	    8	 [-0x17f, 0x7f]	   10	 SIGNED
       -8     -	   10	 [-0x47f, 0x7f]	   12	 SIGNED
      -10     -	    8	 [-0x2ff, 0x1ff]   11	 SIGNED  */
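  /* Reading one row of the above table as a worked example: for 8 + -10,
     i.e. arg0 in [0, 0xff] and arg1 in [-0x200, 0x1ff], the sum is in
     [-0x200, 0x2fe]; 0x2fe needs 10 value bits and the result can be
     negative, so with the sign bit PREC2 is 11 (SIGNED).  That is the
     "just one extra bit" case below, because the signed operand has the
     larger precision (10 vs. 8).  */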
  int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  /* If operands are either both signed or both unsigned,
     we need just one additional bit.  */
  prec2 = (((prec0 < 0) == (prec1 < 0)
	    /* If one operand is signed and one unsigned and
	       the signed one has larger precision, we need
	       just one extra bit, otherwise two.  */
	    || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
		: (prec2 == -prec1 && prec2 != prec0)))
	   ? prec2 + 1 : prec2 + 2);
  int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
		   prec1 < 0 ? -prec1 : prec1);
  prec3 = MAX (prec3, prec);
  tree var = NULL_TREE;
  tree orig_obj = obj;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    {
      unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }
  enum tree_code code;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_UBSAN_CHECK_ADD:
      code = PLUS_EXPR;
      break;
    case IFN_SUB_OVERFLOW:
    case IFN_UBSAN_CHECK_SUB:
      code = MINUS_EXPR;
      break;
    default:
      gcc_unreachable ();
    }
  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);

  unsigned startlimb, endlimb;
  if (ovf)
    {
      startlimb = ~0U;
      endlimb = ~0U;
    }
  else
    {
      startlimb = start / limb_prec;
      endlimb = (end - 1) / limb_prec;
    }

  int prec4 = ovf != NULL_TREE ? prec : prec3;
  bitint_prec_kind kind = bitint_precision_kind (prec4);
  unsigned cnt, rem = 0, fin = 0;
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  bool last_ovf = (ovf == NULL_TREE
		   && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
  if (kind != bitint_prec_huge)
    cnt = CEIL (prec4, limb_prec) + last_ovf;
  else
    {
      rem = (prec4 % (2 * limb_prec));
      fin = (prec4 - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
      idx = idx_first = create_loop (size_zero_node, &idx_next);
    }

  if (kind == bitint_prec_huge)
    m_upwards_2limb = fin;
  m_upwards = true;

  tree type0 = TREE_TYPE (arg0);
  tree type1 = TREE_TYPE (arg1);
  int prec5 = prec4;
  if (bitint_precision_kind (prec5) < bitint_prec_large)
    prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
  if (TYPE_PRECISION (type0) < prec5)
    {
      type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
      if (TREE_CODE (arg0) == INTEGER_CST)
	arg0 = fold_convert (type0, arg0);
    }
  if (TYPE_PRECISION (type1) < prec5)
    {
      type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
      if (TREE_CODE (arg1) == INTEGER_CST)
	arg1 = fold_convert (type1, arg1);
    }
  unsigned int data_cnt = 0;
  tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
  tree cmp = build_zero_cst (m_limb_type);
  unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
  tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      tree rhs1, rhs2;
      if (kind != bitint_prec_huge)
	idx = size_int (i);
      else if (i >= 2)
	idx = size_int (fin + i - 2);
      if (!last_ovf || i < cnt - 1)
	{
	  if (type0 != TREE_TYPE (arg0))
	    rhs1 = handle_cast (type0, arg0, idx);
	  else
	    rhs1 = handle_operand (arg0, idx);
	  if (type1 != TREE_TYPE (arg1))
	    rhs2 = handle_cast (type1, arg1, idx);
	  else
	    rhs2 = handle_operand (arg1, idx);
	  if (i == 0)
	    data_cnt = m_data_cnt;
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
	    rhs2 = add_cast (m_limb_type, rhs2);
	  last_rhs1 = rhs1;
	  last_rhs2 = rhs2;
	}
      else
	{
	  m_data_cnt = data_cnt;
	  if (TYPE_UNSIGNED (type0) || prec0 >= 0)
	    rhs1 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
	      if (TREE_CODE (rhs1) == INTEGER_CST)
		rhs1 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   RSHIFT_EXPR, rhs1, lpm1);
		  insert_before (g);
		  rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	  if (TYPE_UNSIGNED (type1) || prec1 >= 0)
	    rhs2 = build_zero_cst (m_limb_type);
	  else
	    {
	      rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
	      if (TREE_CODE (rhs2) == INTEGER_CST)
		rhs2 = build_int_cst (m_limb_type,
				      tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
	      else
		{
		  tree lpm1 = build_int_cst (unsigned_type_node,
					     limb_prec - 1);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
					   RSHIFT_EXPR, rhs2, lpm1);
		  insert_before (g);
		  rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	    }
	}
      tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
      if (ovf != boolean_false_node)
	{
	  if (tree_fits_uhwi_p (idx))
	    {
	      unsigned limb = tree_to_uhwi (idx);
	      if (limb >= startlimb && limb <= endlimb)
		{
		  tree l = arith_overflow_extract_bits (start, end, rhs,
							limb, check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (ovf == NULL_TREE && !check_zero)
		    {
		      cmp = l;
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (ovf == NULL_TREE)
		    ovf = this_ovf;
		  else
		    {
		      tree b = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      ovf = b;
		    }
		}
	    }
	  else if (startlimb < fin)
	    {
	      if (m_first && startlimb + 2 < fin)
		{
		  tree data_out;
		  ovf = prepare_data_in_out (boolean_false_node, idx,
					     &data_out);
		  ovf_out = m_data.pop ();
		  m_data.pop ();
		  if (!check_zero)
		    {
		      cmp = prepare_data_in_out (cmp, idx, &data_out);
		      cmp_out = m_data.pop ();
		      m_data.pop ();
		    }
		}
	      if (i != 0 || startlimb != fin - 1)
		{
		  tree_code cmp_code;
		  bool single_comparison
		    = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
		  if (!single_comparison)
		    cmp_code = GE_EXPR;
		  else if ((startlimb & 1) == (i & 1))
		    cmp_code = EQ_EXPR;
		  else
		    cmp_code = GT_EXPR;
		  g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
					 NULL_TREE, NULL_TREE);
		  edge edge_true_true, edge_true_false, edge_false;
		  gimple *g2 = NULL;
		  if (!single_comparison)
		    g2 = gimple_build_cond (NE_EXPR, idx,
					    size_int (startlimb), NULL_TREE,
					    NULL_TREE);
		  if_then_if_then_else (g, g2, profile_probability::likely (),
					profile_probability::likely (),
					edge_true_true, edge_true_false,
					edge_false);
		  unsigned tidx = startlimb + (cmp_code == GT_EXPR);
		  tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
							check_zero);
		  tree this_ovf = make_ssa_name (boolean_type_node);
		  if (cmp_code != GT_EXPR && !check_zero)
		    {
		      g = gimple_build_assign (make_ssa_name (m_limb_type),
					       PLUS_EXPR, l,
					       build_int_cst (m_limb_type, 1));
		      insert_before (g);
		      g = gimple_build_assign (this_ovf, GT_EXPR,
					       gimple_assign_lhs (g),
					       build_int_cst (m_limb_type, 1));
		    }
		  else
		    g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
		  insert_before (g);
		  if (cmp_code == GT_EXPR)
		    {
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
		      insert_before (g);
		      this_ovf = t;
		    }
		  tree this_ovf2 = NULL_TREE;
		  if (!single_comparison)
		    {
		      m_gsi = gsi_after_labels (edge_true_true->src);
		      tree t = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
		      insert_before (g);
		      this_ovf2 = make_ssa_name (boolean_type_node);
		      g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
					       ovf, t);
		      insert_before (g);
		    }
		  m_gsi = gsi_after_labels (edge_true_false->dest);
		  tree t;
		  if (i == 1 && ovf_out)
		    t = ovf_out;
		  else
		    t = make_ssa_name (boolean_type_node);
		  gphi *phi = create_phi_node (t, edge_true_false->dest);
		  add_phi_arg (phi, this_ovf, edge_true_false,
			       UNKNOWN_LOCATION);
		  add_phi_arg (phi, ovf ? ovf
			       : boolean_false_node, edge_false,
			       UNKNOWN_LOCATION);
		  if (!single_comparison)
		    add_phi_arg (phi, this_ovf2, edge_true_true,
				 UNKNOWN_LOCATION);
		  ovf = t;
		  if (!check_zero && cmp_code != GT_EXPR)
		    {
		      t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
		      phi = create_phi_node (t, edge_true_false->dest);
		      add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
		      add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
		      if (!single_comparison)
			add_phi_arg (phi, cmp, edge_true_true,
				     UNKNOWN_LOCATION);
		      cmp = t;
		    }
		}
	    }
	}

      if (var || obj)
	{
	  if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
	    ;
	  else if (!tree_fits_uhwi_p (idx)
		   && (unsigned) prec < (fin - (i == 0)) * limb_prec)
	    {
	      bool single_comparison
		= (((unsigned) prec % limb_prec) == 0
		   || prec_limbs + 1 >= fin
		   || (prec_limbs & 1) == (i & 1));
	      g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
				     NULL_TREE, NULL_TREE);
	      gimple *g2 = NULL;
	      if (!single_comparison)
		g2 = gimple_build_cond (EQ_EXPR, idx,
					size_int (prec_limbs - 1),
					NULL_TREE, NULL_TREE);
	      edge edge_true_true, edge_true_false, edge_false;
	      if_then_if_then_else (g, g2, profile_probability::likely (),
				    profile_probability::unlikely (),
				    edge_true_true, edge_true_false,
				    edge_false);
	      tree l = limb_access (type, var ? var : obj, idx, true);
	      g = gimple_build_assign (l, rhs);
	      insert_before (g);
	      if (!single_comparison)
		{
		  m_gsi = gsi_after_labels (edge_true_true->src);
		  tree plm1idx = size_int (prec_limbs - 1);
		  tree plm1type = limb_access_type (type, plm1idx);
		  l = limb_access (type, var ? var : obj, plm1idx, true);
		  if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
		    rhs = add_cast (plm1type, rhs);
		  if (!useless_type_conversion_p (TREE_TYPE (l),
						  TREE_TYPE (rhs)))
		    rhs = add_cast (TREE_TYPE (l), rhs);
		  g = gimple_build_assign (l, rhs);
		  insert_before (g);
		}
	      m_gsi = gsi_after_labels (edge_true_false->dest);
	    }
	  else
	    {
	      tree l = limb_access (type, var ? var : obj, idx, true);
	      if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
		rhs = add_cast (TREE_TYPE (l), rhs);
	      g = gimple_build_assign (l, rhs);
	      insert_before (g);
	    }
	}
      m_first = false;
      if (kind == bitint_prec_huge && i <= 1)
	{
	  if (i == 0)
	    {
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
				       size_one_node);
	      insert_before (g);
	    }
	  else
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
				       size_int (2));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      m_gsi = gsi_for_stmt (final_stmt);
	      m_bb = NULL;
	    }
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
}
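
/* A user-level sketch (illustrative, not a testsuite excerpt) of what
   reaches this function:

     _Bool
     add_checked (_BitInt(512) a, _BitInt(512) b, _BitInt(512) *r)
     {
       return __builtin_add_overflow (a, b, r);
     }

   The .ADD_OVERFLOW call with _Complex _BitInt(512) result is expanded
   into a loop handling two limbs per iteration for the REALPART limbs,
   with the overflow flag for the IMAGPART accumulated on the fly.  */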
/* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.  */

void
bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type), prec0, prec1;
  arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
  arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
  int prec2 = ((prec0 < 0 ? -prec0 : prec0)
	       + (prec1 < 0 ? -prec1 : prec1));
  if (prec0 == 1 || prec1 == 1)
    --prec2;
  tree var = NULL_TREE;
  tree orig_obj = obj;
  bool force_var = false;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  else if (obj != NULL_TREE && DECL_P (obj))
    {
      for (int i = 0; i < 2; ++i)
	{
	  tree arg = i ? arg1 : arg0;
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    arg = TREE_OPERAND (arg, 0);
	  if (get_base_address (arg) == obj)
	    {
	      force_var = true;
	      break;
	    }
	}
    }
  if (obj == NULL_TREE
      || force_var
      || TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large
      || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
    {
      unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }
  tree addr = build_fold_addr_expr (var ? var : obj);
  addr = force_gimple_operand_gsi (&m_gsi, addr, true,
				   NULL_TREE, true, GSI_SAME_STMT);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g
    = gimple_build_call_internal (IFN_MULBITINT, 6,
				  addr, build_int_cst (sitype,
						       MAX (prec2, prec)),
				  arg0, build_int_cst (sitype, prec0),
				  arg1, build_int_cst (sitype, prec1));
  insert_before (g);
  unsigned start, end;
  bool check_zero;
  tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);
  if (ovf == NULL_TREE)
    {
      unsigned startlimb = start / limb_prec;
      unsigned endlimb = (end - 1) / limb_prec;
      unsigned cnt;
      bool use_loop = false;
      if (startlimb == endlimb)
	cnt = 1;
      else if (startlimb + 1 == endlimb)
	cnt = 2;
      else if ((end % limb_prec) == 0)
	{
	  cnt = 2;
	  use_loop = startlimb + 2 < endlimb;
	}
      else
	{
	  cnt = 3;
	  use_loop = startlimb + 2 < endlimb;
	}
      if (cnt == 1)
	{
	  tree l = limb_access (NULL_TREE, var ? var : obj,
				size_int (startlimb), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	  insert_before (g);
	  l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
					   startlimb, check_zero);
	  ovf = make_ssa_name (boolean_type_node);
	  if (check_zero)
	    g = gimple_build_assign (ovf, NE_EXPR, l,
				     build_zero_cst (m_limb_type));
	  else
	    {
	      g = gimple_build_assign (make_ssa_name (m_limb_type),
				       PLUS_EXPR, l,
				       build_int_cst (m_limb_type, 1));
	      insert_before (g);
	      g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
				       build_int_cst (m_limb_type, 1));
	    }
	  insert_before (g);
	}
      else
	{
	  basic_block edge_bb = NULL;
	  gimple_stmt_iterator gsi = m_gsi;
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);

	  tree cmp = build_zero_cst (m_limb_type);
	  for (unsigned i = 0; i < cnt; i++)
	    {
	      tree idx, idx_next = NULL_TREE;
	      if (i == 0)
		idx = size_int (startlimb);
	      else if (i == 2)
		idx = size_int (endlimb);
	      else if (use_loop)
		idx = create_loop (size_int (startlimb + 1), &idx_next);
	      else
		idx = size_int (startlimb + 1);
	      tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
	      g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	      insert_before (g);
	      l = gimple_assign_lhs (g);
	      if (i == 0 || i == 2)
		l = arith_overflow_extract_bits (start, end, l,
						 tree_to_uhwi (idx),
						 check_zero);
	      if (i == 0 && !check_zero)
		{
		  cmp = l;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, l,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		}
	      else
		g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::likely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      if (i == 1 && use_loop)
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
					   size_one_node);
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next,
					 size_int (endlimb + (cnt == 2)),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge true_edge, false_edge;
		  extract_true_false_edges_from_block (gsi_bb (m_gsi),
						       &true_edge,
						       &false_edge);
		  m_gsi = gsi_after_labels (false_edge->dest);
		  m_bb = NULL;
		}
	    }

	  ovf = make_ssa_name (boolean_type_node);
	  basic_block bb = gimple_bb (final_stmt);
	  gphi *phi = create_phi_node (ovf, bb);
	  edge e1 = find_edge (gsi_bb (m_gsi), bb);
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      tree val = e == e1 ? boolean_false_node : boolean_true_node;
	      add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
	    }
	  m_gsi = gsi_for_stmt (final_stmt);
	  m_bb = NULL;
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
}
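
/* An illustrative sketch of user code reaching this function:

     _Bool
     mul_checked (_BitInt(256) a, _BitInt(256) b, _BitInt(256) *r)
     {
       return __builtin_mul_overflow (a, b, r);
     }

   The full-width product is computed into a temporary (or the result
   object) by the .MULBITINT call above, i.e. by the libgcc _BitInt
   multiplication helper, and the limbs above the result precision are
   then inspected to derive the overflow flag.  */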
/* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
   .{ADD,SUB,MUL}_OVERFLOW call.  */

void
bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  rhs1 = TREE_OPERAND (rhs1, 0);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  if (TREE_CODE (rhs1) == SSA_NAME
      && (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
    {
      lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
      return;
    }
  int part = var_to_partition (m_map, rhs1);
  gcc_assert (m_vars[part] != NULL_TREE);
  tree var = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
    obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
  tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
			    gimple_assign_rhs_code (stmt) == REALPART_EXPR
			    ? 0 : nelts * m_limb_size);
  tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
  gimple *g = gimple_build_assign (obj, v2);
  insert_before (g);
}
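
/* E.g. (a sketch) for

     _BitInt(256) r;
     int ovf = __builtin_add_overflow (a, b, &r);

   the r = REALPART_EXPR <_N> statement left after lowering the
   .ADD_OVERFLOW call is handled here as a limb array copy from the
   variable backing _N's partition into OBJ.  */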
/* Lower COMPLEX_EXPR stmt.  */

void
bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  int part = var_to_partition (m_map, lhs);
  gcc_assert (m_vars[part] != NULL_TREE);
  lhs = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
  tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
  tree v2;
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs1);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs1))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs1);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  gimple *g = gimple_build_assign (v1, v2);
  insert_before (g);
  tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
			   TYPE_SIZE_UNIT (atype));
  v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
  if (TREE_CODE (rhs2) == SSA_NAME)
    {
      part = var_to_partition (m_map, rhs2);
      gcc_assert (m_vars[part] != NULL_TREE);
      v2 = m_vars[part];
    }
  else if (integer_zerop (rhs2))
    v2 = build_zero_cst (atype);
  else
    v2 = tree_output_constant_def (rhs2);
  if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
    v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
  g = gimple_build_assign (v1, v2);
  insert_before (g);
}
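
/* A COMPLEX_EXPR such as _N = COMPLEX_EXPR <_5, _8> with large/huge
   _BitInt parts is thus lowered into two limb array block copies into
   the lower and upper halves of the variable backing _N's partition.  */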
/* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge
   _BitInt argument.  */

void
bitint_large_huge::lower_bit_query (gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = (gimple_call_num_args (stmt) == 2
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  tree type = TREE_TYPE (arg0);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  enum internal_fn ifn = gimple_call_internal_fn (stmt);
  enum built_in_function fcode = END_BUILTINS;
  gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
  switch (ifn)
    {
    case IFN_CLZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZL;
      else
	fcode = BUILT_IN_CLZLL;
      break;
    case IFN_FFS:
      /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
	 we don't add the addend at the end.  */
      arg1 = integer_zero_node;
      /* FALLTHRU */
    case IFN_CTZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZL;
      else
	fcode = BUILT_IN_CTZLL;
      break;
    case IFN_CLRSB:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSB;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSBL;
      else
	fcode = BUILT_IN_CLRSBLL;
      break;
    case IFN_PARITY:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITY;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITYL;
      else
	fcode = BUILT_IN_PARITYLL;
      break;
    case IFN_POPCOUNT:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNT;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNTL;
      else
	fcode = BUILT_IN_POPCOUNTLL;
      break;
    default:
      gcc_unreachable ();
    }
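
  /* Illustrative user-level input for this lowering (a sketch):

       int
       count (unsigned _BitInt(300) x)
       {
	 return __builtin_popcountg (x);
       }

     The type-generic builtin becomes .POPCOUNT on the _BitInt operand,
     which is expanded below into a loop invoking the chosen word-size
     __builtin_popcount{,l,ll} on each limb and summing the results.  */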
  tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
  unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  struct bq_details { edge e; tree val, addend; } *bqp = NULL;
  basic_block edge_bb = NULL;
  if (ifn != IFN_CLZ && ifn != IFN_CLRSB)
    {
      tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = (prec % (2 * limb_prec));
	  end = (prec - rem) / limb_prec;
	  cnt = 2 + CEIL (rem, limb_prec);
	  idx = idx_first = create_loop (size_zero_node, &idx_next);
	}

      if (ifn == IFN_CTZ || ifn == IFN_FFS)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  if (kind == bitint_prec_large)
	    m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	m_after_stmt = stmt;
      if (kind != bitint_prec_large)
	m_upwards_2limb = end;

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (i);
	  else if (i >= 2)
	    idx = size_int (end + (i > 2));

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  tree in, out, tem;
	  if (ifn == IFN_PARITY)
	    in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	  else if (ifn == IFN_FFS)
	    in = prepare_data_in_out (integer_one_node, idx, &out);
	  else
	    in = prepare_data_in_out (integer_zero_node, idx, &out);

	  switch (ifn)
	    {
	    case IFN_CTZ:
	    case IFN_FFS:
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1, e2;
	      e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	      if (tree_fits_uhwi_p (idx))
		bqp[i].addend
		  = build_int_cst (integer_type_node,
				   tree_to_uhwi (idx) * limb_prec
				   + (ifn == IFN_FFS));
	      else
		{
		  bqp[i].addend = in;
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in,
					   build_int_cst (integer_type_node,
							  limb_prec));
		  insert_before (g);
		  m_data[m_data_cnt] = res;
		}
	      break;
	    case IFN_PARITY:
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
		  insert_before (g);
		}
	      else
		res = rhs1;
	      m_data[m_data_cnt] = res;
	      break;
	    case IFN_POPCOUNT:
	      g = gimple_build_call (fndecl, 1, rhs1);
	      tem = make_ssa_name (integer_type_node);
	      gimple_call_set_lhs (g, tem);
	      insert_before (g);
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in, tem);
		  insert_before (g);
		}
	      else
		res = tem;
	      m_data[m_data_cnt] = res;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  m_first = false;
	  if (kind == bitint_prec_huge && i <= 1)
	    {
	      if (i == 0)
		{
		  idx = make_ssa_name (sizetype);
		  g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
					   size_one_node);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
					   size_int (2));
		  insert_before (g);
		  g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  if (ifn == IFN_CTZ || ifn == IFN_FFS)
		    m_gsi = gsi_after_labels (edge_bb);
		  else
		    m_gsi = gsi_for_stmt (stmt);
		  m_bb = NULL;
		}
	    }
	}
    }
  else
    {
      tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
      int sub_one = 0;
      if (kind == bitint_prec_large)
	cnt = CEIL (prec, limb_prec);
      else
	{
	  rem = prec % limb_prec;
	  if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
	    rem = limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = 1 + (rem != 0);
	}
      if (ifn == IFN_CLRSB)
	sub_one = 1;

      if (ifn == IFN_CLZ)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
	}

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  if (kind == bitint_prec_large)
	    idx = size_int (cnt - i - 1);
	  else if (i == cnt - 1)
	    idx = create_loop (size_int (end - 1), &idx_next);
	  else
	    idx = size_int (end);

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  if (ifn == IFN_CLZ)
	    {
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	    }
	  else
	    {
	      if (i == 0)
		{
		  first = rhs1;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, rhs1,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   BIT_XOR_EXPR, rhs1, first);
		  insert_before (g);
		  tree stype = signed_type_for (m_limb_type);
		  g = gimple_build_cond (LT_EXPR,
					 add_cast (stype,
						   gimple_assign_lhs (g)),
					 build_zero_cst (stype),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge e1 = split_block (gsi_bb (m_gsi), g);
		  e1->flags = EDGE_FALSE_VALUE;
		  edge e2 = make_edge (e1->src, gimple_bb (stmt),
				       EDGE_TRUE_VALUE);
		  e1->probability = profile_probability::unlikely ();
		  e2->probability = e1->probability.invert ();
		  if (i == 1)
		    set_immediate_dominator (CDI_DOMINATORS, e2->dest,
					     e2->src);
		  m_gsi = gsi_after_labels (e1->dest);
		  bqp[2 * i].e = e2;
		  g = gimple_build_cond (NE_EXPR, rhs1, first,
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[2 * i + 1].e = e2;
	    }

	  tree in, out;
	  if (tree_fits_uhwi_p (idx))
	    in = build_int_cst (integer_type_node,
				prec
				- (((int) tree_to_uhwi (idx) + 1)
				   * limb_prec) - sub_one);
	  else
	    {
	      in = build_int_cst (integer_type_node, rem - sub_one);
	      if (i == 1)
		in = prepare_data_in_out (in, idx, &out);
	      out = m_data[m_data_cnt + 1];
	      g = gimple_build_assign (out, PLUS_EXPR, in,
				       build_int_cst (integer_type_node,
						      limb_prec));
	      insert_before (g);
	      m_data[m_data_cnt] = out;
	    }
	  bqp[i].addend = in;

	  m_first = false;
	  if (kind == bitint_prec_huge && i == cnt - 1)
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       size_int (-1));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge true_edge, false_edge;
	      extract_true_false_edges_from_block (gsi_bb (m_gsi),
						   &true_edge, &false_edge);
	      m_gsi = gsi_after_labels (false_edge->dest);
	      m_bb = NULL;
	    }
	}
    }

  gphi *phi1, *phi2, *phi3;
  basic_block bb;
  if (ifn == IFN_CLZ || ifn == IFN_CTZ || ifn == IFN_FFS)
    {
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, gimple_bb (stmt)));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      gimple_bb (stmt));
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
	}
      if (arg1 == NULL_TREE)
	{
	  g = gimple_build_builtin_unreachable (m_loc);
	  insert_before (g);
	}
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      if (arg1 == NULL_TREE)
	g = gimple_build_assign (lhs, PLUS_EXPR,
				 gimple_phi_result (phi2),
				 gimple_call_lhs (g));
      else
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_phi_result (phi2),
				   gimple_call_lhs (g));
	  insert_before (g);
	  edge e1 = split_block (gimple_bb (stmt), g);
	  edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
	  e2->probability = profile_probability::always ();
	  set_immediate_dominator (CDI_DOMINATORS, e1->dest,
				   get_immediate_dominator (CDI_DOMINATORS,
							    e1->src));
	  phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
	  add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
	  add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
	  m_gsi = gsi_for_stmt (stmt);
	  g = gimple_build_assign (lhs, gimple_phi_result (phi3));
	}
      gsi_replace (&m_gsi, g, true);
    }
  else if (ifn == IFN_CLRSB)
    {
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, edge_bb));
      edge e;
      e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
      set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
			       get_immediate_dominator (CDI_DOMINATORS,
							edge_bb));
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      edge_bb);
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      edge_bb);
      phi3 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
		       UNKNOWN_LOCATION);
	  tree a = bqp[i].addend;
	  if (i && kind == bitint_prec_large)
	    a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
	  if (i)
	    add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
	}
      add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
		   UNKNOWN_LOCATION);
      m_gsi = gsi_after_labels (edge_bb);
      g = gimple_build_call (fndecl, 1,
			     add_cast (signed_type_for (m_limb_type),
				       gimple_phi_result (phi1)));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (integer_type_node),
			       PLUS_EXPR, gimple_call_lhs (g),
			       gimple_phi_result (phi2));
      insert_before (g);
      if (kind != bitint_prec_large)
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_assign_lhs (g),
				   integer_one_node);
	  insert_before (g);
	}
      add_phi_arg (phi3, gimple_assign_lhs (g),
		   find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_assign (lhs, gimple_phi_result (phi3));
      gsi_replace (&m_gsi, g, true);
    }
  else if (ifn == IFN_PARITY)
    {
      g = gimple_build_call (fndecl, 1, res);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&m_gsi, g, true);
    }
  else
    {
      g = gimple_build_assign (lhs, res);
      gsi_replace (&m_gsi, g, true);
    }
}
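
/* Note the ARG1 handling above: .CLZ (x, prec) as emitted e.g. for C23
   <stdbit.h> style leading-zero counting has a defined result for x == 0
   supplied through the PHI on the edge which reaches the final block
   without finding a non-zero limb, while single-argument .CLZ (x)
   instead gets __builtin_unreachable () on that path.  */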
/* Lower a call statement with one or more large/huge _BitInt
   arguments or large/huge _BitInt return value.  */

void
bitint_large_huge::lower_call (tree obj, gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  unsigned int nargs = gimple_call_num_args (stmt);
  if (gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_UBSAN_CHECK_ADD:
      case IFN_UBSAN_CHECK_SUB:
	lower_addsub_overflow (obj, stmt);
	return;
      case IFN_MUL_OVERFLOW:
      case IFN_UBSAN_CHECK_MUL:
	lower_mul_overflow (obj, stmt);
	return;
      case IFN_CLZ:
      case IFN_CTZ:
      case IFN_CLRSB:
      case IFN_FFS:
      case IFN_PARITY:
      case IFN_POPCOUNT:
	lower_bit_query (stmt);
	return;
      default:
	break;
      }
  bool returns_twice = (gimple_call_flags (stmt) & ECF_RETURNS_TWICE) != 0;
  for (unsigned int i = 0; i < nargs; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (TREE_CODE (arg) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
	  || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
	continue;
      if (SSA_NAME_IS_DEFAULT_DEF (arg)
	  && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
	{
	  tree var = create_tmp_reg (TREE_TYPE (arg));
	  arg = get_or_create_ssa_default_def (cfun, var);
	}
      else
	{
	  int p = var_to_partition (m_map, arg);
	  tree v = m_vars[p];
	  gcc_assert (v != NULL_TREE);
	  if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
	    v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
	  arg = make_ssa_name (TREE_TYPE (arg));
	  gimple *g = gimple_build_assign (arg, v);
	  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	  if (returns_twice && bb_has_abnormal_pred (gimple_bb (stmt)))
	    {
	      m_returns_twice_calls.safe_push (stmt);
	      returns_twice = false;
	    }
	}
      gimple_call_set_arg (stmt, i, arg);
      if (m_preserved == NULL)
	m_preserved = BITMAP_ALLOC (NULL);
      bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
    }
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      tree v = m_vars[p];
      gcc_assert (v != NULL_TREE);
      if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
	v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
      gimple_call_set_lhs (stmt, v);
      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
    }
  update_stmt (stmt);
}
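
/* E.g. (a sketch) for

     void bar (_BitInt(512));

     void
     foo (_BitInt(512) x)
     {
       bar (x + 1wb);
     }

   the large _BitInt argument of the call is replaced above by a fresh
   SSA_NAME initialized from the limb array variable backing the
   partition into which the addition was lowered.  */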
/* Lower __asm STMT which involves large/huge _BitInt values.  */

void
bitint_large_huge::lower_asm (gimple *stmt)
{
  gasm *g = as_a <gasm *> (stmt);
  unsigned noutputs = gimple_asm_noutputs (g);
  unsigned ninputs = gimple_asm_ninputs (g);

  for (unsigned i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  int part = var_to_partition (m_map, s);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  TREE_VALUE (t) = m_vars[part];
	}
    }
  for (unsigned i = 0; i < ninputs; ++i)
    {
      tree t = gimple_asm_input_op (g, i);
      tree s = TREE_VALUE (t);
      if (TREE_CODE (s) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (s)
	      && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
	    {
	      TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
	      mark_addressable (TREE_VALUE (t));
	    }
	  else
	    {
	      int part = var_to_partition (m_map, s);
	      gcc_assert (m_vars[part] != NULL_TREE);
	      TREE_VALUE (t) = m_vars[part];
	    }
	}
    }
  update_stmt (stmt);
}
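
/* E.g. (a sketch) for

     _BitInt(256) x = 1wb;
     asm volatile ("" : "+m" (x));

   the large/huge _BitInt asm operands are rewritten above to refer to
   the underlying partition variables, since such values live only in
   memory after this pass.  */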
/* Lower statement STMT which involves large/huge _BitInt values
   into code accessing individual limbs.  */

void
bitint_large_huge::lower_stmt (gimple *stmt)
{
  m_first = true;
  m_data.truncate (0);
  m_data_cnt = 0;
  m_gsi = gsi_for_stmt (stmt);
  m_after_stmt = NULL;
  m_bb = NULL;
  m_init_gsi = m_gsi;
  gsi_prev (&m_init_gsi);
  m_preheader_bb = NULL;
  m_upwards_2limb = 0;
  m_upwards = false;
  m_var_msb = false;
  m_cast_conditional = false;
  m_bitfld_load = 0;
  m_loc = gimple_location (stmt);
  if (is_gimple_call (stmt))
    {
      lower_call (NULL_TREE, stmt);
      return;
    }
  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      lower_asm (stmt);
      return;
    }
  tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
  tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
  bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
  bool mergeable_cast_p = false;
  bool final_cast_p = false;
  if (gimple_assign_cast_p (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
	rhs1 = TREE_OPERAND (rhs1, 0);
      if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	mergeable_cast_p = true;
      else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	       && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   || POINTER_TYPE_P (TREE_TYPE (lhs))
		   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR))
	{
	  final_cast_p = true;
	  if (((TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
		&& TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
	       || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   && !POINTER_TYPE_P (TREE_TYPE (lhs))))
	      && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
	    {
	      /* Handle VIEW_CONVERT_EXPRs to not generally supported
		 huge INTEGER_TYPEs like uint256_t or uint512_t.  These
		 are usually emitted from memcpy folding and backends
		 support moves with them but that is usually it.
		 Similarly handle VCEs to vector/complex types etc.  */
	      gcc_assert (TREE_CODE (rhs1) == SSA_NAME);
	      if (SSA_NAME_IS_DEFAULT_DEF (rhs1)
		  && (!SSA_NAME_VAR (rhs1) || VAR_P (SSA_NAME_VAR (rhs1))))
		{
		  tree var = create_tmp_reg (TREE_TYPE (lhs));
		  rhs1 = get_or_create_ssa_default_def (cfun, var);
		  gimple_assign_set_rhs1 (stmt, rhs1);
		  gimple_assign_set_rhs_code (stmt, SSA_NAME);
		}
	      else if (m_names == NULL
		       || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
		{
		  gimple *g = SSA_NAME_DEF_STMT (rhs1);
		  gcc_assert (gimple_assign_load_p (g));
		  tree mem = gimple_assign_rhs1 (g);
		  tree ltype = TREE_TYPE (lhs);
		  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (mem));
		  if (as != TYPE_ADDR_SPACE (ltype))
		    ltype
		      = build_qualified_type (ltype,
					      TYPE_QUALS (ltype)
					      | ENCODE_QUAL_ADDR_SPACE (as));
		  rhs1 = build1 (VIEW_CONVERT_EXPR, ltype,
				 unshare_expr (mem));
		  gimple_assign_set_rhs1 (stmt, rhs1);
		}
	      else
		{
		  int part = var_to_partition (m_map, rhs1);
		  gcc_assert (m_vars[part] != NULL_TREE);
		  rhs1 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				 m_vars[part]);
		  gimple_assign_set_rhs1 (stmt, rhs1);
		}
	      update_stmt (stmt);
	      return;
	    }
	  if (TREE_CODE (rhs1) == SSA_NAME
	      && (m_names == NULL
		  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (rhs1);
	      if (is_gimple_assign (g)
		  && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
		{
		  tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
		  if (TREE_CODE (rhs2) == SSA_NAME
		      && (m_names == NULL
			  || !bitmap_bit_p (m_names,
					    SSA_NAME_VERSION (rhs2))))
		    {
		      g = SSA_NAME_DEF_STMT (rhs2);
		      int ovf = optimizable_arith_overflow (g);
		      if (ovf == 2)
			/* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
			   and IMAGPART_EXPR uses, where the latter is cast to
			   non-_BitInt, it will be optimized when handling
			   the REALPART_EXPR.  */
			return;
		      if (ovf == 1)
			{
			  lower_call (NULL_TREE, g);
			  return;
			}
		    }
		}
	    }
	}
      else if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	       && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	       && !POINTER_TYPE_P (TREE_TYPE (rhs1))
	       && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
	{
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs1), m_vars[part]);
	  insert_before (gimple_build_assign (lhs, rhs1));
	  return;
	}
    }
  if (gimple_store_p (stmt))
    {
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	{
	  gimple *g = SSA_NAME_DEF_STMT (rhs1);
	  m_loc = gimple_location (g);
	  lhs = gimple_assign_lhs (stmt);
	  if (is_gimple_assign (g) && !mergeable_op (g))
	    switch (gimple_assign_rhs_code (g))
	      {
	      case LSHIFT_EXPR:
	      case RSHIFT_EXPR:
		lower_shift_stmt (lhs, g);
	      handled:
		m_gsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		release_ssa_name (gimple_vdef (stmt));
		gsi_remove (&m_gsi, true);
		return;
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case TRUNC_MOD_EXPR:
		lower_muldiv_stmt (lhs, g);
		goto handled;
	      case FIX_TRUNC_EXPR:
		lower_float_conv_stmt (lhs, g);
		goto handled;
	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		lower_cplxpart_stmt (lhs, g);
		goto handled;
	      case VIEW_CONVERT_EXPR:
		{
		  tree rhs1 = gimple_assign_rhs1 (g);
		  rhs1 = TREE_OPERAND (rhs1, 0);
		  if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		      && !POINTER_TYPE_P (TREE_TYPE (rhs1)))
		    {
		      tree ltype = TREE_TYPE (rhs1);
		      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (lhs));
		      ltype
			= build_qualified_type (ltype,
						TYPE_QUALS (TREE_TYPE (lhs))
						| ENCODE_QUAL_ADDR_SPACE (as));
		      lhs = build1 (VIEW_CONVERT_EXPR, ltype, lhs);
		      gimple_assign_set_lhs (stmt, lhs);
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      gimple_assign_set_rhs_code (stmt, TREE_CODE (rhs1));
		      update_stmt (stmt);
		      return;
		    }
		}
		break;
	      default:
		break;
	      }
	  else if (optimizable_arith_overflow (g) == 3)
	    {
	      lower_call (lhs, g);
	      return;
	    }
	  m_loc = gimple_location (stmt);
	}
    }
  if (mergeable_op (stmt)
      || gimple_store_p (stmt)
      || gimple_assign_load_p (stmt)
      || eq_p
      || mergeable_cast_p)
    {
      lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
      if (!eq_p)
	return;
    }
  else if (cmp_code != ERROR_MARK)
    lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
  if (cmp_code != ERROR_MARK)
    {
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cstmt = as_a <gcond *> (stmt);
	  gimple_cond_set_lhs (cstmt, lhs);
	  gimple_cond_set_rhs (cstmt, boolean_false_node);
	  gimple_cond_set_code (cstmt, cmp_code);
	  update_stmt (stmt);
	  return;
	}
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree cond = build2 (cmp_code, boolean_type_node, lhs,
			      boolean_false_node);
	  gimple_assign_set_rhs1 (stmt, cond);
	  lhs = gimple_assign_lhs (stmt);
	  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
		      || (bitint_precision_kind (TREE_TYPE (lhs))
			  <= bitint_prec_middle));
	}
      else
	{
	  gimple_assign_set_rhs1 (stmt, lhs);
	  gimple_assign_set_rhs2 (stmt, boolean_false_node);
	  gimple_assign_set_rhs_code (stmt, cmp_code);
	}
      update_stmt (stmt);
      return;
    }
  if (final_cast_p)
    {
      tree lhs_type = TREE_TYPE (lhs);
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
      gimple *g;
      if ((TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) == bitint_prec_middle)
	  || POINTER_TYPE_P (lhs_type))
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
						   TYPE_UNSIGNED (lhs_type));
      m_data_cnt = 0;
      tree rhs1 = gimple_assign_rhs1 (stmt);
      tree r1 = handle_operand (rhs1, size_int (0));
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  m_data_cnt = 0;
	  m_first = false;
	  tree r2 = handle_operand (rhs1, size_int (1));
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != TREE_TYPE (lhs))
	g = gimple_build_assign (lhs, NOP_EXPR, r1);
      else
	g = gimple_build_assign (lhs, r1);
      gsi_replace (&m_gsi, g, true);
      return;
    }
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case LSHIFT_EXPR:
      case RSHIFT_EXPR:
	lower_shift_stmt (NULL_TREE, stmt);
	return;
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case TRUNC_MOD_EXPR:
	lower_muldiv_stmt (NULL_TREE, stmt);
	return;
      case FIX_TRUNC_EXPR:
      case FLOAT_EXPR:
	lower_float_conv_stmt (NULL_TREE, stmt);
	return;
      case REALPART_EXPR:
      case IMAGPART_EXPR:
	lower_cplxpart_stmt (NULL_TREE, stmt);
	return;
      case COMPLEX_EXPR:
	lower_complexexpr_stmt (stmt);
	return;
      default:
	break;
      }
  gcc_unreachable ();
}
/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}
/* Return true if STMT uses a library function and needs to take
   address of its inputs.  We need to avoid bit-fields in those
   cases.  Similarly, we need to avoid overlap between destination
   and source limb arrays.  */

static bool
stmt_needs_operand_addr (gimple *stmt)
{
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case TRUNC_MOD_EXPR:
      case FLOAT_EXPR:
	return true;
      default:
	break;
      }
  else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
	   || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
    return true;
  return false;
}
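
/* E.g. (a sketch) with

     struct S { _BitInt(135) a : 135; } s;
     _BitInt(135) d = s.a / 3wb;

   the division is lowered to a libgcc call taking limb array addresses,
   so the bit-field load cannot be handed to it directly and must not be
   sunk into the division statement.  */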
/* Dominator walker used to discover which large/huge _BitInt
   loads could be sunk into all their uses.  */

class bitint_dom_walker : public dom_walker
{
public:
  bitint_dom_walker (bitmap names, bitmap loads)
    : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}

  edge before_dom_children (basic_block) final override;

private:
  bitmap m_names, m_loads;
};
edge
bitint_dom_walker::before_dom_children (basic_block bb)
{
  gphi *phi = get_virtual_phi (bb);
  tree vop;
  if (phi)
    vop = gimple_phi_result (phi);
  else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    vop = NULL_TREE;
  else
    vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;

  auto_vec<tree, 16> worklist;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;

      if (!vop && gimple_vuse (stmt))
	vop = gimple_vuse (stmt);

      tree cvop = vop;
      if (gimple_vdef (stmt))
	vop = gimple_vdef (stmt);

      tree lhs = gimple_get_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
	/* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
	   it means it will be handled in a loop or straight line code
	   at the location of its (ultimate) immediate use, so for
	   vop checking purposes check these only at the ultimate
	   immediate use.  */
	continue;

      ssa_op_iter oi;
      use_operand_p use_p;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
	{
	  tree s = USE_FROM_PTR (use_p);
	  if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	      && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	    worklist.safe_push (s);
	}

      bool needs_operand_addr = stmt_needs_operand_addr (stmt);
      while (worklist.length () > 0)
	{
	  tree s = worklist.pop ();

	  if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      needs_operand_addr |= stmt_needs_operand_addr (g);
	      FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
		      && (bitint_precision_kind (TREE_TYPE (s2))
			  >= bitint_prec_large))
		    worklist.safe_push (s2);
		}
	      continue;
	    }

	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
	    {
	      tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
		s = rhs;
	      else
		continue;
	    }
	  else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
	    continue;

	  gimple *g = SSA_NAME_DEF_STMT (s);
	  tree rhs1 = gimple_assign_rhs1 (g);
	  if (needs_operand_addr
	      && TREE_CODE (rhs1) == COMPONENT_REF
	      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
	    {
	      tree fld = TREE_OPERAND (rhs1, 1);
	      /* For little-endian, we can allow as inputs bit-fields
		 which start at a limb boundary.  */
	      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
		  && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
		  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		      % limb_prec) == 0)
		;
	      else
		{
		  bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
		  continue;
		}
	    }

	  ao_ref ref;
	  ao_ref_init (&ref, rhs1);
	  tree lvop = gimple_vuse (g);
	  unsigned limit = 64;
	  tree vuse = cvop;
	  if (vop != cvop
	      && is_gimple_assign (stmt)
	      && gimple_store_p (stmt)
	      && (needs_operand_addr
		  || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
	    vuse = vop;
	  if (vuse != lvop
	      && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
					 NULL, NULL, limit, lvop) == NULL)
	    bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
	}
    }

  bb->aux = (void *) vop;
  return NULL;
}
/* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
   build_ssa_conflict_graph.
   The differences are:
   1) don't process assignments with large/huge _BitInt lhs not in NAMES
   2) for large/huge _BitInt multiplication/division/modulo process def
      only after processing uses rather than before to make uses conflict
      with the definition
   3) for large/huge _BitInt uses not in NAMES mark the uses of their
      SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
      the final statement.  */

void
build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
				 ssa_conflicts *graph, bitmap names,
				 void (*def) (live_track *, tree,
					      ssa_conflicts *),
				 void (*use) (live_track *, tree))
{
  bool muldiv_p = false;
  tree lhs = NULL_TREE;
  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  tree type = TREE_TYPE (lhs);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
		return;
	      switch (gimple_assign_rhs_code (stmt))
		{
		case MULT_EXPR:
		case TRUNC_DIV_EXPR:
		case TRUNC_MOD_EXPR:
		  muldiv_p = true;
		default:
		  break;
		}
	    }
	}
    }

  ssa_op_iter iter;
  tree var;
  if (!muldiv_p)
    {
      /* For stmts with more than one SSA_NAME definition pretend all the
	 SSA_NAME outputs but the first one are live at this point, so
	 that conflicts are added in between all those even when they are
	 actually not really live after the asm, because expansion might
	 copy those into pseudos after the asm and if multiple outputs
	 share the same partition, it might overwrite those that should
	 be live.  E.g.
	 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
	 return a;  */
      bool first = true;
      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	if (first)
	  first = false;
	else
	  use (live, var);
      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	def (live, var, graph);
    }

  auto_vec<tree, 16> worklist;
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    {
      tree type = TREE_TYPE (var);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	type = TREE_TYPE (type);
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) >= bitint_prec_large)
	{
	  if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
	    use (live, var);
	  else
	    worklist.safe_push (var);
	}
    }

  while (worklist.length () > 0)
    {
      tree s = worklist.pop ();
      FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
	{
	  tree type = TREE_TYPE (var);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
		use (live, var);
	      else
		worklist.safe_push (var);
	    }
	}
    }

  if (muldiv_p)
    def (live, lhs, graph);
}
/* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
   return the largest bitint_prec_kind of them, otherwise return
   bitint_prec_small.  */

static bitint_prec_kind
arith_overflow_arg_kind (gimple *stmt)
{
  bitint_prec_kind ret = bitint_prec_small;
  if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_MUL_OVERFLOW:
	for (int i = 0; i < 2; ++i)
	  {
	    tree a = gimple_call_arg (stmt, i);
	    if (TREE_CODE (a) == INTEGER_CST
		&& TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
	      {
		bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
		ret = MAX (ret, kind);
	      }
	  }
	break;
      default:
	break;
      }
  return ret;
}
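
/* E.g. (a sketch) in

     unsigned u = 42, r;
     int ovf = __builtin_add_overflow (12345678901234567890123456789wb,
				       u, &r);

   the .ADD_OVERFLOW lhs has _Complex unsigned type, so only the large
   _BitInt INTEGER_CST argument betrays that the statement needs the
   _BitInt lowering; this helper detects exactly that case.  */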
/* Entry point for _BitInt(N) operation lowering during optimization.  */

static unsigned int
gimple_lower_bitint (void)
{
  small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
  limb_prec = 0;

  unsigned int i;
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
	continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
	      != bitint_prec_small)
	    break;
	  type = TREE_TYPE (type);
	}
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) != bitint_prec_small)
	break;
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
	 into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_store_p (g))
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      >= bitint_prec_large))
		break;
	    }
	}
      /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
	 to floating point types need to be rewritten.  */
      else if (SCALAR_FLOAT_TYPE_P (type))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (t) == INTEGER_CST
		  && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      != bitint_prec_small))
		break;
	    }
	}
    }
  if (i == num_ssa_names)
    return 0;

  basic_block bb;
  auto_vec<gimple *, 4> switch_statements;
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
	{
	  tree idx = gimple_switch_index (swtch);
	  if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
	      || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
	    continue;

	  group_case_labels_stmt (swtch);
	  if (gimple_switch_num_labels (swtch) == 1)
	    {
	      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	      gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
	      gsi_remove (&gsi, true);
	    }
	  else
	    switch_statements.safe_push (swtch);
	}
    }

  if (!switch_statements.is_empty ())
    {
      bool expanded = false;
      gimple *stmt;
      unsigned int j;
      FOR_EACH_VEC_ELT (switch_statements, j, stmt)
	{
	  gswitch *swtch = as_a <gswitch *> (stmt);
	  tree_switch_conversion::switch_decision_tree dt (swtch);
	  expanded |= dt.analyze_switch_statement ();
	}

      if (expanded)
	{
	  free_dominance_info (CDI_DOMINATORS);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  mark_virtual_operands_for_renaming (cfun);
	  cleanup_tree_cfg (TODO_update_ssa);
	}
    }
6141 struct bitint_large_huge large_huge
;
6142 bool has_large_huge_parm_result
= false;
6143 bool has_large_huge
= false;
6144 unsigned int ret
= 0, first_large_huge
= ~0U;
6145 bool edge_insertions
= false;
6146 for (; i
< num_ssa_names
; ++i
)
6148 tree s
= ssa_name (i
);
6151 tree type
= TREE_TYPE (s
);
6152 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6154 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
6155 >= bitint_prec_large
)
6156 has_large_huge
= true;
6157 type
= TREE_TYPE (type
);
6159 if (TREE_CODE (type
) == BITINT_TYPE
6160 && bitint_precision_kind (type
) >= bitint_prec_large
)
6162 if (first_large_huge
== ~0U)
6163 first_large_huge
= i
;
6164 gimple
*stmt
= SSA_NAME_DEF_STMT (s
), *g
;
6165 gimple_stmt_iterator gsi
;
	  /* Unoptimize certain constructs to simpler alternatives to
	     avoid having to lower all of them.  */
	  if (is_gimple_assign (stmt) && gimple_bb (stmt))
	    switch (rhs_code = gimple_assign_rhs_code (stmt))
	      {
	      default:
		break;
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case TRUNC_MOD_EXPR:
		if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s))
		  {
		    location_t loc = gimple_location (stmt);
		    gsi = gsi_for_stmt (stmt);
		    tree rhs1 = gimple_assign_rhs1 (stmt);
		    tree rhs2 = gimple_assign_rhs2 (stmt);
		    /* For multiplication and division with (ab)
		       lhs and one or both operands force the operands
		       into new SSA_NAMEs to avoid coalescing failures.  */
		    if (TREE_CODE (rhs1) == SSA_NAME
			&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
		      {
			first_large_huge = 0;
			tree t = make_ssa_name (TREE_TYPE (rhs1));
			g = gimple_build_assign (t, SSA_NAME, rhs1);
			gsi_insert_before (&gsi, g, GSI_SAME_STMT);
			gimple_set_location (g, loc);
			gimple_assign_set_rhs1 (stmt, t);
			if (rhs1 == rhs2)
			  {
			    gimple_assign_set_rhs2 (stmt, t);
			    rhs2 = t;
			  }
			update_stmt (stmt);
		      }
		    if (TREE_CODE (rhs2) == SSA_NAME
			&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2))
		      {
			first_large_huge = 0;
			tree t = make_ssa_name (TREE_TYPE (rhs2));
			g = gimple_build_assign (t, SSA_NAME, rhs2);
			gsi_insert_before (&gsi, g, GSI_SAME_STMT);
			gimple_set_location (g, loc);
			gimple_assign_set_rhs2 (stmt, t);
			update_stmt (stmt);
		      }
		  }
		break;
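	      /* A sketch of why the copies above are needed: if the lhs
		 occurs in a PHI on an abnormal edge (e.g. because of
		 setjmp), it must coalesce with the other names of that
		 PHI; rewriting

		   s = op * op;   into   t = op;  s = t * t;

		 removes the overlap between the lhs and the operands so
		 out-of-SSA coalescing of the abnormal names cannot
		 fail.  */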
	      case LROTATE_EXPR:
	      case RROTATE_EXPR:
		{
		  first_large_huge = 0;
		  location_t loc = gimple_location (stmt);
		  gsi = gsi_for_stmt (stmt);
		  tree rhs1 = gimple_assign_rhs1 (stmt);
		  tree type = TREE_TYPE (rhs1);
		  tree n = gimple_assign_rhs2 (stmt), m;
		  tree p = build_int_cst (TREE_TYPE (n),
					  TYPE_PRECISION (type));
		  if (TREE_CODE (n) == INTEGER_CST)
		    m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
		  else
		    {
		      m = make_ssa_name (TREE_TYPE (n));
		      g = gimple_build_assign (m, MINUS_EXPR, p, n);
		      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		      gimple_set_location (g, loc);
		    }
		  if (!TYPE_UNSIGNED (type))
		    {
		      tree utype = build_bitint_type (TYPE_PRECISION (type),
						      1);
		      if (TREE_CODE (rhs1) == INTEGER_CST)
			rhs1 = fold_convert (utype, rhs1);
		      else
			{
			  tree t = make_ssa_name (utype);
			  g = gimple_build_assign (t, NOP_EXPR, rhs1);
			  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
			  gimple_set_location (g, loc);
			  rhs1 = t;
			}
		    }
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   rhs_code == LROTATE_EXPR
					   ? LSHIFT_EXPR : RSHIFT_EXPR,
					   rhs1, n);
		  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		  gimple_set_location (g, loc);
		  tree op1 = gimple_assign_lhs (g);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					   rhs_code == LROTATE_EXPR
					   ? RSHIFT_EXPR : LSHIFT_EXPR,
					   rhs1, m);
		  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		  gimple_set_location (g, loc);
		  tree op2 = gimple_assign_lhs (g);
		  tree lhs = gimple_assign_lhs (stmt);
		  if (!TYPE_UNSIGNED (type))
		    {
		      g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
					       BIT_IOR_EXPR, op1, op2);
		      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		      gimple_set_location (g, loc);
		      g = gimple_build_assign (lhs, NOP_EXPR,
					       gimple_assign_lhs (g));
		    }
		  else
		    g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
		  gsi_replace (&gsi, g, true);
		  gimple_set_location (g, loc);
		}
		break;
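	      /* A sketch of the rotate rewrite above: with prec being the
		 precision of the (unsigned view of the) operand,

		   lhs = x r<< n;

		 becomes

		   _1 = x << n;
		   _2 = x >> (prec - n);
		   lhs = _1 | _2;

		 (with a cast of signed x to the unsigned _BitInt type and
		 back on the result), so only shifts and BIT_IOR_EXPR need
		 real lowering support.  */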
	      case ABS_EXPR:
	      case ABSU_EXPR:
	      case MIN_EXPR:
	      case MAX_EXPR:
	      case COND_EXPR:
		first_large_huge = 0;
		gsi = gsi_for_stmt (stmt);
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
		location_t loc = gimple_location (stmt);
		if (rhs_code == ABS_EXPR)
		  g = gimple_build_cond (LT_EXPR, rhs1,
					 build_zero_cst (TREE_TYPE (rhs1)),
					 NULL_TREE, NULL_TREE);
		else if (rhs_code == ABSU_EXPR)
		  {
		    rhs2 = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
		    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		    gimple_set_location (g, loc);
		    g = gimple_build_cond (LT_EXPR, rhs1,
					   build_zero_cst (TREE_TYPE (rhs1)),
					   NULL_TREE, NULL_TREE);
		    rhs1 = rhs2;
		  }
		else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
		  {
		    rhs2 = gimple_assign_rhs2 (stmt);
		    if (TREE_CODE (rhs1) == INTEGER_CST)
		      std::swap (rhs1, rhs2);
		    g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
					   NULL_TREE, NULL_TREE);
		    if (rhs_code == MAX_EXPR)
		      std::swap (rhs1, rhs2);
		  }
		else
		  {
		    g = gimple_build_cond (NE_EXPR, rhs1,
					   build_zero_cst (TREE_TYPE (rhs1)),
					   NULL_TREE, NULL_TREE);
		    rhs1 = gimple_assign_rhs2 (stmt);
		    rhs2 = gimple_assign_rhs3 (stmt);
		  }
		gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		gimple_set_location (g, loc);
		edge e1 = split_block (gsi_bb (gsi), g);
		edge e2 = split_block (e1->dest, (gimple *) NULL);
		edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
		e3->probability = profile_probability::even ();
		e1->flags = EDGE_TRUE_VALUE;
		e1->probability = e3->probability.invert ();
		if (dom_info_available_p (CDI_DOMINATORS))
		  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
		if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
		  {
		    gsi = gsi_after_labels (e1->dest);
		    g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
					     NEGATE_EXPR, rhs1);
		    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
		    gimple_set_location (g, loc);
		    rhs2 = gimple_assign_lhs (g);
		    std::swap (rhs1, rhs2);
		  }
		gsi = gsi_for_stmt (stmt);
		gsi_remove (&gsi, true);
		gphi *phi = create_phi_node (lhs, e2->dest);
		add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
		add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
		break;
	      }
	}
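      /* A sketch of the preceding rewrite for e.g. ABS_EXPR:

	   lhs_3 = ABS_EXPR <x_1>;

	 becomes roughly

	   if (x_1 < 0) goto <negate>; else goto <join>;
	   <negate>: _2 = -x_1;
	   <join>: # lhs_3 = PHI <_2 (negate edge), x_1 (false edge)>

	 and similarly MIN/MAX/COND_EXPR become a comparison, a CFG diamond
	 and a PHI, so none of these codes need their own lowering path.  */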
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
	 into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_store_p (g))
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      >= bitint_prec_large))
		has_large_huge = true;
	    }
	}
      /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
	 to floating point types need to be rewritten.  */
      else if (SCALAR_FLOAT_TYPE_P (type))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (t) == INTEGER_CST
		  && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && (bitint_precision_kind (TREE_TYPE (t))
		      >= bitint_prec_large))
		has_large_huge = true;
	    }
	}
    }
  for (i = first_large_huge; i < num_ssa_names; ++i)
    {
      tree s = ssa_name (i);
      if (s == NULL)
	continue;
      tree type = TREE_TYPE (s);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	type = TREE_TYPE (type);
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) >= bitint_prec_large)
	{
	  use_operand_p use_p;
	  gimple *use_stmt;
	  has_large_huge = true;
	  if (optimize
	      && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
	    continue;
	  /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
	     the same bb and could be handled in the same loop with the
	     use statement.  */
	  if (optimize
	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && single_imm_use (s, &use_p, &use_stmt)
	      && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
	    {
	      if (mergeable_op (SSA_NAME_DEF_STMT (s)))
		{
		  if (mergeable_op (use_stmt))
		    continue;
		  tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
		  if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
		    continue;
		  if (gimple_assign_cast_p (use_stmt))
		    {
		      tree lhs = gimple_assign_lhs (use_stmt);
		      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
			  /* Don't merge with VIEW_CONVERT_EXPRs to
			     huge INTEGER_TYPEs used sometimes in memcpy
			     expansion.  */
			  && (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
			      || (TYPE_PRECISION (TREE_TYPE (lhs))
				  <= MAX_FIXED_MODE_SIZE)))
			continue;
		    }
		  else if (gimple_store_p (use_stmt)
			   && is_gimple_assign (use_stmt)
			   && !gimple_has_volatile_ops (use_stmt)
			   && !stmt_ends_bb_p (use_stmt))
		    continue;
		}
	      if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
		{
		  tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
		  if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
		    {
		      rhs1 = TREE_OPERAND (rhs1, 0);
		      if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
			  && !POINTER_TYPE_P (TREE_TYPE (rhs1))
			  && gimple_store_p (use_stmt))
			continue;
		    }
		  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		      && ((is_gimple_assign (use_stmt)
			   && (gimple_assign_rhs_code (use_stmt)
			       != COMPLEX_EXPR))
			  || gimple_code (use_stmt) == GIMPLE_COND)
		      && (!gimple_store_p (use_stmt)
			  || (is_gimple_assign (use_stmt)
			      && !gimple_has_volatile_ops (use_stmt)
			      && !stmt_ends_bb_p (use_stmt)))
		      && (TREE_CODE (rhs1) != SSA_NAME
			  || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
		    {
		      if (is_gimple_assign (use_stmt))
			switch (gimple_assign_rhs_code (use_stmt))
			  {
			  case TRUNC_DIV_EXPR:
			  case TRUNC_MOD_EXPR:
			  case FLOAT_EXPR:
			    /* For division, modulo and casts to floating
			       point, avoid representing unsigned operands
			       using negative prec if they were sign-extended
			       from narrower precision.  */
			    if (TYPE_UNSIGNED (TREE_TYPE (s))
				&& !TYPE_UNSIGNED (TREE_TYPE (rhs1))
				&& (TYPE_PRECISION (TREE_TYPE (s))
				    > TYPE_PRECISION (TREE_TYPE (rhs1))))
			      goto force_name;
			    if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
				|| (bitint_precision_kind (TREE_TYPE (rhs1))
				    < bitint_prec_large))
			      continue;
			    /* Uses which use handle_operand_addr can't
			       deal with nested casts.  */
			    if (TREE_CODE (rhs1) == SSA_NAME
				&& gimple_assign_cast_p
				     (SSA_NAME_DEF_STMT (rhs1))
				&& has_single_use (rhs1)
				&& (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
				    == gimple_bb (SSA_NAME_DEF_STMT (s))))
			      goto force_name;
			    break;
			  case VIEW_CONVERT_EXPR:
			    {
			      tree lhs = gimple_assign_lhs (use_stmt);
			      /* Don't merge with VIEW_CONVERT_EXPRs to
				 non-integral types.  */
			      if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
				goto force_name;
			      /* Don't merge with VIEW_CONVERT_EXPRs to
				 huge INTEGER_TYPEs used sometimes in memcpy
				 expansion.  */
			      if (TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
				  && (TYPE_PRECISION (TREE_TYPE (lhs))
				      > MAX_FIXED_MODE_SIZE))
				goto force_name;
			    }
			    break;
			  default:
			    break;
			  }
		      if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
			  || (bitint_precision_kind (TREE_TYPE (rhs1))
			      < bitint_prec_large))
			continue;
		      if ((TYPE_PRECISION (TREE_TYPE (rhs1))
			   >= TYPE_PRECISION (TREE_TYPE (s)))
			  && mergeable_op (use_stmt))
			continue;
		      /* Prevent merging a widening non-mergeable cast
			 on result of some narrower mergeable op
			 together with later mergeable operations.  E.g.
			 result of _BitInt(223) addition shouldn't be
			 sign-extended to _BitInt(513) and have another
			 _BitInt(513) added to it, as handle_plus_minus
			 with its PHI node handling inside of handle_cast
			 will not work correctly.  An exception is if
			 use_stmt is a store, this is handled directly
			 in lower_mergeable_stmt.  */
		      if (TREE_CODE (rhs1) != SSA_NAME
			  || !has_single_use (rhs1)
			  || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
			      != gimple_bb (SSA_NAME_DEF_STMT (s)))
			  || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
			  || gimple_store_p (use_stmt))
			continue;
		      if ((TYPE_PRECISION (TREE_TYPE (rhs1))
			   < TYPE_PRECISION (TREE_TYPE (s)))
			  && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
			{
			  /* Another exception is if the widening cast is
			     from mergeable same precision cast from something
			     not mergeable.  */
			  tree rhs2
			    = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
			  if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
			      && (TYPE_PRECISION (TREE_TYPE (rhs1))
				  == TYPE_PRECISION (TREE_TYPE (rhs2))))
			    {
			      if (TREE_CODE (rhs2) != SSA_NAME
				  || !has_single_use (rhs2)
				  || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
				      != gimple_bb (SSA_NAME_DEF_STMT (s)))
				  || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
				continue;
			    }
			}
		    }
		}
	      if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
		switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
		  {
		  case IMAGPART_EXPR:
		    {
		      tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
		      rhs1 = TREE_OPERAND (rhs1, 0);
		      if (TREE_CODE (rhs1) == SSA_NAME)
			{
			  gimple *g = SSA_NAME_DEF_STMT (rhs1);
			  if (optimizable_arith_overflow (g))
			    continue;
			}
		    }
		    /* FALLTHRU */
		  case LSHIFT_EXPR:
		  case RSHIFT_EXPR:
		  case MULT_EXPR:
		  case TRUNC_DIV_EXPR:
		  case TRUNC_MOD_EXPR:
		  case FIX_TRUNC_EXPR:
		    if (gimple_store_p (use_stmt)
			&& is_gimple_assign (use_stmt)
			&& !gimple_has_volatile_ops (use_stmt)
			&& !stmt_ends_bb_p (use_stmt))
		      {
			tree lhs = gimple_assign_lhs (use_stmt);
			/* As multiply/division passes address of the lhs
			   to library function and that assumes it can extend
			   it to whole number of limbs, avoid merging those
			   with bit-field stores.  Don't allow it for
			   shifts etc. either, so that the bit-field store
			   handling doesn't have to be done everywhere.  */
			if (TREE_CODE (lhs) == COMPONENT_REF
			    && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
			  break;
			continue;
		      }
		    break;
		  default:
		    break;
		  }
	    }

	  /* Also ignore uninitialized uses.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (s)
	      && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
	    continue;

	force_name:
	  if (!large_huge.m_names)
	    large_huge.m_names = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
	  if (has_single_use (s))
	    {
	      if (!large_huge.m_single_use_names)
		large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (large_huge.m_single_use_names,
			      SSA_NAME_VERSION (s));
	    }
	  if (SSA_NAME_VAR (s)
	      && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
		   && SSA_NAME_IS_DEFAULT_DEF (s))
		  || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
	    has_large_huge_parm_result = true;
	  if (optimize
	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
	      && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
	      && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
	    {
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool optimizable_load = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, s)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (gimple_code (use_stmt) == GIMPLE_PHI
		      || is_gimple_call (use_stmt)
		      || gimple_code (use_stmt) == GIMPLE_ASM
		      || (is_gimple_assign (use_stmt)
			  && (gimple_assign_rhs_code (use_stmt)
			      == COMPLEX_EXPR)))
		    {
		      optimizable_load = false;
		      break;
		    }
		}

	      ssa_op_iter oi;
	      FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
					oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
		    {
		      optimizable_load = false;
		      break;
		    }
		}

	      if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
		{
		  if (!large_huge.m_loads)
		    large_huge.m_loads = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
		}
	    }
	}
      /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
	 into memory.  Such functions could have no large/huge SSA_NAMEs.  */
      else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
	{
	  gimple *g = SSA_NAME_DEF_STMT (s);
	  if (is_gimple_assign (g) && gimple_store_p (g))
	    {
	      tree t = gimple_assign_rhs1 (g);
	      if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		  && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
		has_large_huge = true;
	    }
	}
    }
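  /* At this point large_huge.m_names holds the large/huge _BitInt
     SSA_NAMEs which will get a backing array variable, while names with
     a single use in a mergeable statement in the same basic block were
     deliberately skipped: a chain such as

	 _1 = a_2 + b_3;  _4 = _1 ^ c_5;  *p_6 = _4;

     on a huge _BitInt can then be emitted as a single loop over the limbs
     without materializing _1 and _4 in memory (a sketch; the precise
     conditions are the ones checked above).  */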
  if (large_huge.m_names || has_large_huge)
    {
      ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
      calculate_dominance_info (CDI_DOMINATORS);
      if (optimize)
	enable_ranger (cfun);
      if (large_huge.m_loads)
	{
	  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
	  bitint_dom_walker (large_huge.m_names,
			     large_huge.m_loads).walk (entry);
	  bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
	  clear_aux_for_blocks ();
	  BITMAP_FREE (large_huge.m_loads);
	}
      large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
      large_huge.m_limb_size
	= tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));

      if (large_huge.m_names)
	{
	  large_huge.m_map
	    = init_var_map (num_ssa_names, NULL, large_huge.m_names);
	  coalesce_ssa_name (large_huge.m_map);
	  partition_view_normal (large_huge.m_map);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "After Coalescing:\n");
	      dump_var_map (dump_file, large_huge.m_map);
	    }
	  large_huge.m_vars
	    = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
	  bitmap_iterator bi;
	  if (has_large_huge_parm_result)
	    EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
	      {
		tree s = ssa_name (i);
		if (SSA_NAME_VAR (s)
		    && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
			 && SSA_NAME_IS_DEFAULT_DEF (s))
			|| TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
		  {
		    int p = var_to_partition (large_huge.m_map, s);
		    if (large_huge.m_vars[p] == NULL_TREE)
		      {
			large_huge.m_vars[p] = SSA_NAME_VAR (s);
			mark_addressable (SSA_NAME_VAR (s));
		      }
		  }
	      }
	  tree atype = NULL_TREE;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
	  EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
	    {
	      tree s = ssa_name (i);
	      int p = var_to_partition (large_huge.m_map, s);
	      if (large_huge.m_vars[p] == NULL_TREE)
		{
		  if (atype == NULL_TREE
		      || !tree_int_cst_equal (TYPE_SIZE (atype),
					      TYPE_SIZE (TREE_TYPE (s))))
		    {
		      unsigned HOST_WIDE_INT nelts
			= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
		      atype = build_array_type_nelts (large_huge.m_limb_type,
						      nelts);
		    }
		  large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
		  mark_addressable (large_huge.m_vars[p]);
		}
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  print_generic_expr (dump_file, s, TDF_SLIM);
		  fprintf (dump_file, " -> ");
		  print_generic_expr (dump_file, large_huge.m_vars[p],
				      TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	    }
	}
      FOR_EACH_BB_REVERSE_FN (bb, cfun)
	{
	  gimple_stmt_iterator prev;
	  for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
	       gsi = prev)
	    {
	      prev = gsi;
	      gsi_prev (&prev);
	      ssa_op_iter iter;
	      gimple *stmt = gsi_stmt (gsi);
	      if (is_gimple_debug (stmt))
		continue;
	      bitint_prec_kind kind = bitint_prec_small;
	      tree t;
	      FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
		if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
		  {
		    bitint_prec_kind this_kind
		      = bitint_precision_kind (TREE_TYPE (t));
		    kind = MAX (kind, this_kind);
		  }
	      if (is_gimple_assign (stmt) && gimple_store_p (stmt))
		{
		  t = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
		    {
		      bitint_prec_kind this_kind
			= bitint_precision_kind (TREE_TYPE (t));
		      kind = MAX (kind, this_kind);
		    }
		}
	      if (is_gimple_assign (stmt)
		  && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
		{
		  t = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
		      && TREE_CODE (t) == INTEGER_CST)
		    {
		      bitint_prec_kind this_kind
			= bitint_precision_kind (TREE_TYPE (t));
		      kind = MAX (kind, this_kind);
		    }
		}
	      if (is_gimple_call (stmt))
		{
		  t = gimple_call_lhs (stmt);
		  if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
		    {
		      bitint_prec_kind this_kind
			= arith_overflow_arg_kind (stmt);
		      kind = MAX (kind, this_kind);
		      if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
			{
			  this_kind
			    = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
			  kind = MAX (kind, this_kind);
			}
		    }
		}
	      if (kind == bitint_prec_small)
		continue;
	      switch (gimple_code (stmt))
		{
		case GIMPLE_CALL:
		  /* For now.  We'll need to handle some internal functions and
		     perhaps some builtins.  */
		  if (kind == bitint_prec_middle)
		    continue;
		  break;
		case GIMPLE_ASM:
		  if (kind == bitint_prec_middle)
		    continue;
		  break;
		case GIMPLE_ASSIGN:
		  if (gimple_clobber_p (stmt))
		    continue;
		  if (kind >= bitint_prec_large)
		    break;
		  if (gimple_assign_single_p (stmt))
		    /* No need to lower copies, loads or stores.  */
		    continue;
		  if (gimple_assign_cast_p (stmt))
		    {
		      tree lhs = gimple_assign_lhs (stmt);
		      tree rhs = gimple_assign_rhs1 (stmt);
		      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
			  && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
			  && (TYPE_PRECISION (TREE_TYPE (lhs))
			      == TYPE_PRECISION (TREE_TYPE (rhs))))
			/* No need to lower casts to same precision.  */
			continue;
		    }
		  break;
		default:
		  break;
		}

	      if (kind == bitint_prec_middle)
		{
		  tree type = NULL_TREE;
		  /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
		     with the same precision and back.  */
		  unsigned int nops = gimple_num_ops (stmt);
		  for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
		       i < nops; ++i)
		    if (tree op = gimple_op (stmt, i))
		      {
			tree nop = maybe_cast_middle_bitint (&gsi, op, type);
			if (nop != op)
			  gimple_set_op (stmt, i, nop);
			else if (COMPARISON_CLASS_P (op))
			  {
			    TREE_OPERAND (op, 0)
			      = maybe_cast_middle_bitint (&gsi,
							  TREE_OPERAND (op, 0),
							  type);
			    TREE_OPERAND (op, 1)
			      = maybe_cast_middle_bitint (&gsi,
							  TREE_OPERAND (op, 1),
							  type);
			  }
			else if (TREE_CODE (op) == CASE_LABEL_EXPR)
			  {
			    CASE_LOW (op)
			      = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
							  type);
			    CASE_HIGH (op)
			      = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
							  type);
			  }
		      }
		  if (tree lhs = gimple_get_lhs (stmt))
		    if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
			&& (bitint_precision_kind (TREE_TYPE (lhs))
			    == bitint_prec_middle))
		      {
			int prec = TYPE_PRECISION (TREE_TYPE (lhs));
			int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
			type = build_nonstandard_integer_type (prec, uns);
			tree lhs2 = make_ssa_name (type);
			gimple_set_lhs (stmt, lhs2);
			gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
			if (stmt_ends_bb_p (stmt))
			  {
			    edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
			    gsi_insert_on_edge (e, g);
			    edge_insertions = true;
			  }
			else
			  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
		      }
		  update_stmt (stmt);
		  continue;
		}
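	      /* A sketch of the middle _BitInt handling above: assuming a
		 target with 64-bit limbs where _BitInt(65) is "middle", a
		 statement like

		   _3 = _1 + _2;   // _BitInt(65)

		 is rewritten via maybe_cast_middle_bitint to roughly

		   _4 = (T) _1;
		   _5 = (T) _2;
		   _6 = _4 + _5;
		   _3 = (_BitInt(65)) _6;

		 where T is the same-precision INTEGER_TYPE from
		 build_nonstandard_integer_type (65, uns), whose mode
		 (e.g. TImode) the target supports directly.  */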
	      if (tree lhs = gimple_get_lhs (stmt))
		if (TREE_CODE (lhs) == SSA_NAME)
		  {
		    tree type = TREE_TYPE (lhs);
		    if (TREE_CODE (type) == COMPLEX_TYPE)
		      type = TREE_TYPE (type);
		    if (TREE_CODE (type) == BITINT_TYPE
			&& bitint_precision_kind (type) >= bitint_prec_large
			&& (large_huge.m_names == NULL
			    || !bitmap_bit_p (large_huge.m_names,
					      SSA_NAME_VERSION (lhs))))
		      continue;
		  }

	      large_huge.lower_stmt (stmt);
	    }
	  tree atype = NULL_TREE;
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *phi = gsi.phi ();
	      tree lhs = gimple_phi_result (phi);
	      if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
		  || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
		continue;
	      int p1 = var_to_partition (large_huge.m_map, lhs);
	      gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
	      tree v1 = large_huge.m_vars[p1];
	      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
		{
		  tree arg = gimple_phi_arg_def (phi, i);
		  edge e = gimple_phi_arg_edge (phi, i);
		  gimple *g;
		  switch (TREE_CODE (arg))
		    {
		    case INTEGER_CST:
		      if (integer_zerop (arg) && VAR_P (v1))
			{
			  tree zero = build_zero_cst (TREE_TYPE (v1));
			  g = gimple_build_assign (v1, zero);
			  gsi_insert_on_edge (e, g);
			  edge_insertions = true;
			  break;
			}
		      {
			unsigned int min_prec, prec, rem;
			tree c;
			int ext;
			prec = TYPE_PRECISION (TREE_TYPE (arg));
			rem = prec % (2 * limb_prec);
			min_prec = bitint_min_cst_precision (arg, ext);
			if (min_prec > prec - rem - 2 * limb_prec
			    && min_prec > (unsigned) limb_prec)
			  /* Constant which has enough significant bits that it
			     isn't worth trying to save .rodata space by
			     extending from a smaller number.  */
			  min_prec = prec;
			else
			  min_prec = CEIL (min_prec, limb_prec) * limb_prec;
			if (min_prec == 0)
			  c = NULL_TREE;
			else if (min_prec == prec)
			  c = tree_output_constant_def (arg);
			else if (min_prec == (unsigned) limb_prec)
			  c = fold_convert (large_huge.m_limb_type, arg);
			else
			  {
			    tree ctype = build_bitint_type (min_prec, 1);
			    c = tree_output_constant_def (fold_convert (ctype,
									arg));
			  }
			if (c)
			  {
			    if (VAR_P (v1) && min_prec == prec)
			      {
				tree v2 = build1 (VIEW_CONVERT_EXPR,
						  TREE_TYPE (v1), c);
				g = gimple_build_assign (v1, v2);
				gsi_insert_on_edge (e, g);
				edge_insertions = true;
				break;
			      }
			    if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
			      g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							       TREE_TYPE (c),
							       v1), c);
			    else
			      {
				unsigned HOST_WIDE_INT nelts
				  = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
				    / limb_prec;
				tree vtype
				  = build_array_type_nelts (large_huge.m_limb_type,
							    nelts);
				g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
								 vtype, v1),
							 build1 (VIEW_CONVERT_EXPR,
								 vtype, c));
			      }
			    gsi_insert_on_edge (e, g);
			  }
			if (ext == 0)
			  {
			    unsigned HOST_WIDE_INT nelts
			      = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
				 - min_prec) / limb_prec;
			    tree vtype
			      = build_array_type_nelts (large_huge.m_limb_type,
							nelts);
			    tree ptype = build_pointer_type (TREE_TYPE (v1));
			    tree off;
			    if (c)
			      off = fold_convert (ptype,
						  TYPE_SIZE_UNIT (TREE_TYPE (c)));
			    else
			      off = build_zero_cst (ptype);
			    tree vd = build2 (MEM_REF, vtype,
					      build_fold_addr_expr (v1), off);
			    g = gimple_build_assign (vd, build_zero_cst (vtype));
			  }
			else
			  {
			    tree vd = v1;
			    if (c)
			      {
				tree ptype
				  = build_pointer_type (TREE_TYPE (v1));
				tree off
				  = fold_convert (ptype,
						  TYPE_SIZE_UNIT (TREE_TYPE (c)));
				vd = build2 (MEM_REF, large_huge.m_limb_type,
					     build_fold_addr_expr (v1), off);
			      }
			    vd = build_fold_addr_expr (vd);
			    unsigned HOST_WIDE_INT nbytes
			      = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
			    if (c)
			      nbytes
				-= tree_to_uhwi (TYPE_SIZE_UNIT
						   (TREE_TYPE (c)));
			    tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
			    g = gimple_build_call (fn, 3, vd,
						   integer_minus_one_node,
						   build_int_cst (sizetype,
								  nbytes));
			  }
			gsi_insert_on_edge (e, g);
			edge_insertions = true;
			break;
		      }
		    case SSA_NAME:
		      if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
			{
			  if (large_huge.m_names == NULL
			      || !bitmap_bit_p (large_huge.m_names,
						SSA_NAME_VERSION (arg)))
			    continue;
			}
		      {
			int p2 = var_to_partition (large_huge.m_map, arg);
			if (p1 == p2)
			  continue;
			gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
			tree v2 = large_huge.m_vars[p2];
			if (VAR_P (v1) && VAR_P (v2))
			  g = gimple_build_assign (v1, v2);
			else if (VAR_P (v1))
			  g = gimple_build_assign (v1,
						   build1 (VIEW_CONVERT_EXPR,
							   TREE_TYPE (v1), v2));
			else if (VAR_P (v2))
			  g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							   TREE_TYPE (v2), v1),
						   v2);
			else
			  {
			    if (atype == NULL_TREE
				|| !tree_int_cst_equal (TYPE_SIZE (atype),
							TYPE_SIZE
							  (TREE_TYPE (lhs))))
			      {
				unsigned HOST_WIDE_INT nelts
				  = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
				    / limb_prec;
				atype
				  = build_array_type_nelts
				      (large_huge.m_limb_type, nelts);
			      }
			    g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
							     atype, v1),
						     build1 (VIEW_CONVERT_EXPR,
							     atype, v2));
			  }
			gsi_insert_on_edge (e, g);
			edge_insertions = true;
			break;
		      }
		    default:
		      gcc_unreachable ();
		    }
		}
	    }
	}
    }
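  /* A sketch of the PHI handling above: large/huge _BitInt values live in
     the partition variables ("bitint.N" arrays or PARM/RESULT decls), so

	 # x_1 = PHI <y_2(bb3), 0wb(bb4)>

     is realized as copies inserted on the incoming edges, roughly

	 bb3 edge:  bitint.1 = bitint.2;   (possibly via VIEW_CONVERT_EXPR)
	 bb4 edge:  bitint.1 = {};         (zeroing, or a memset of -1 for
					    all-ones extension)

     with no copy at all when both names were coalesced into the same
     partition.  */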
  if (large_huge.m_names || has_large_huge)
    {
      for (i = 0; i < num_ssa_names; ++i)
	{
	  tree s = ssa_name (i);
	  if (s == NULL)
	    continue;
	  tree type = TREE_TYPE (s);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (large_huge.m_preserved
		  && bitmap_bit_p (large_huge.m_preserved,
				   SSA_NAME_VERSION (s)))
		continue;
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      if (gimple_code (g) == GIMPLE_NOP)
		{
		  if (SSA_NAME_VAR (s))
		    set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
		  release_ssa_name (s);
		  continue;
		}
	      if (gimple_bb (g) == NULL)
		{
		  release_ssa_name (s);
		  continue;
		}
	      if (gimple_code (g) != GIMPLE_ASM)
		{
		  gimple_stmt_iterator gsi = gsi_for_stmt (g);
		  bool save_vta = flag_var_tracking_assignments;
		  flag_var_tracking_assignments = false;
		  gsi_remove (&gsi, true);
		  flag_var_tracking_assignments = save_vta;
		}
	      gimple *nop = gimple_build_nop ();
	      SSA_NAME_DEF_STMT (s) = nop;
	      release_ssa_name (s);
	    }
	}
      if (optimize)
	disable_ranger (cfun);
    }

  if (edge_insertions)
    gsi_commit_edge_inserts ();
  /* Fix up arguments of ECF_RETURNS_TWICE calls.  Those were temporarily
     inserted before the call, but that is invalid IL, so move them to the
     right place and add corresponding PHIs.  */
  if (!large_huge.m_returns_twice_calls.is_empty ())
    {
      auto_vec<gimple *, 16> arg_stmts;
      while (!large_huge.m_returns_twice_calls.is_empty ())
	{
	  gimple *stmt = large_huge.m_returns_twice_calls.pop ();
	  gimple_stmt_iterator gsi = gsi_after_labels (gimple_bb (stmt));
	  while (gsi_stmt (gsi) != stmt)
	    {
	      if (is_gimple_debug (gsi_stmt (gsi)))
		gsi_next (&gsi);
	      else
		{
		  arg_stmts.safe_push (gsi_stmt (gsi));
		  gsi_remove (&gsi, false);
		}
	    }
	  gimple *g;
	  basic_block bb = NULL;
	  edge e = NULL, ead = NULL;
	  FOR_EACH_VEC_ELT (arg_stmts, i, g)
	    {
	      gsi_safe_insert_before (&gsi, g);
	      if (i == 0)
		{
		  bb = gimple_bb (stmt);
		  gcc_checking_assert (EDGE_COUNT (bb->preds) == 2);
		  e = EDGE_PRED (bb, 0);
		  ead = EDGE_PRED (bb, 1);
		  if ((ead->flags & EDGE_ABNORMAL) == 0)
		    std::swap (e, ead);
		  gcc_checking_assert ((e->flags & EDGE_ABNORMAL) == 0
				       && (ead->flags & EDGE_ABNORMAL));
		}
	      tree lhs = gimple_assign_lhs (g);
	      tree arg = lhs;
	      gphi *phi = create_phi_node (copy_ssa_name (arg), bb);
	      add_phi_arg (phi, arg, e, UNKNOWN_LOCATION);
	      tree var = create_tmp_reg (TREE_TYPE (arg));
	      suppress_warning (var, OPT_Wuninitialized);
	      arg = get_or_create_ssa_default_def (cfun, var);
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
	      add_phi_arg (phi, arg, ead, UNKNOWN_LOCATION);
	      arg = gimple_phi_result (phi);
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
	      imm_use_iterator iter;
	      gimple *use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		{
		  if (use_stmt == phi)
		    continue;
		  gcc_checking_assert (use_stmt == stmt);
		  use_operand_p use_p;
		  FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		    SET_USE (use_p, arg);
		}
	      update_stmt (stmt);
	    }
	  arg_stmts.truncate (0);
	}
    }

  return ret;
}
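/* An illustrative scenario for the fixup above: given something like

     __attribute__((returns_twice)) int bar (_BitInt(256));
     ... = bar (x + y);

   the statements materializing the lowered argument were emitted just
   before the call, but a returns_twice call must start its basic block
   right after the labels, so the setup statements are relocated and each
   defined SSA_NAME is routed through a PHI that supplies an undefined
   default definition on the abnormal edge (a sketch; the details are in
   the loop above).  */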
namespace {

const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}

namespace {

const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}