/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
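
/* In this encoding the low three bits act as LT, EQ and GT flags and
   bit 3 as the "unordered" flag: for example COMPCODE_LE is
   COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3) and COMPCODE_UNGE is
   COMPCODE_UNORD | COMPCODE_GE.  ANDing or ORing two predicates on the
   same operands then reduces to bitwise AND/OR of their compcodes; see
   combine_comparisons below.  */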
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
		    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree, tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
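
/* Illustrative sketch (editor's addition, not in the original source):
   exercising OVERFLOW_SUM_SIGN on host integers.  */
#if 0
static int
overflow_sum_sign_demo (void)
{
  /* The largest positive HOST_WIDE_INT; adding 1 wraps to the minimum.  */
  HOST_WIDE_INT a = (HOST_WIDE_INT) (~(unsigned HOST_WIDE_INT) 0 >> 1);
  HOST_WIDE_INT b = 1;
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + b);

  /* a and b agree in sign while a and sum differ, so this yields 1.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);
}
#endif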
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
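
/* For example, on a host with 64-bit HOST_WIDE_INT, BASE is 2^32 and a
   doubleword (128-bit) constant becomes four 32-bit digits, each held in
   its own HOST_WIDE_INT so that the digit-by-digit products used below
   cannot overflow the host word.  */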
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
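
/* Illustrative sketch (editor's addition): encode and decode are exact
   inverses, so a doubleword value survives a round trip unchanged.  */
#if 0
static void
encode_decode_roundtrip (unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low2;
  HOST_WIDE_INT hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  gcc_assert (low2 == low && hi2 == hi);
}
#endif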
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
	CONST_OVERFLOWED is nonzero,
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
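
/* Usage sketch (editor's addition): narrowing a constant.  0x1ff does not
   fit in `signed_char_type_node'; force_fit_type keeps the low 8 bits and
   sign extends, producing -1, and would also set the overflow flags had
   OVERFLOWED been passed as true.  */
#if 0
static tree
force_fit_type_demo (void)
{
  tree t = build_int_cst_wide (signed_char_type_node, 0x1ff, 0);
  return force_fit_type (t, 1, false, false);
}
#endif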
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
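
/* Note how the carry out of the low word is recovered portably: after
   l = l1 + l2 in unsigned arithmetic, wraparound occurred iff l < l1,
   and that 0/1 value is folded into the high-word sum above.  */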
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[k] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
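
/* Usage sketch (editor's addition): (-1) * (-1) in doubleword
   arithmetic.  */
#if 0
static int
mul_double_demo (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* Both operands are -1 (all bits set in both words); the product is 1
     and the signed overflow check returns 0.  */
  return mul_double_with_sign ((unsigned HOST_WIDE_INT) -1,
			       (HOST_WIDE_INT) -1,
			       (unsigned HOST_WIDE_INT) -1,
			       (HOST_WIDE_INT) -1,
			       &lv, &hv, false);
}
#endif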
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;	/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
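
/* Worked example (editor's addition): dividing -7 by 2 under the four
   DIV codes gives
     TRUNC_DIV_EXPR: quo -3, rem -1   (round toward zero)
     FLOOR_DIV_EXPR: quo -4, rem  1   (round toward -infinity)
     CEIL_DIV_EXPR:  quo -3, rem -1   (round toward +infinity)
     ROUND_DIV_EXPR: quo -4, rem  1   (-3.5 ties round away from zero)
   and in each case quo * 2 + rem == -7 is preserved.  */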
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
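
/* Usage sketch (editor's addition): folding an exact division of two
   INTEGER_CSTs.  */
#if 0
static tree
div_if_zero_remainder_demo (void)
{
  tree twelve = build_int_cst (integer_type_node, 12);
  tree four = build_int_cst (integer_type_node, 4);

  /* 12 % 4 == 0, so this folds to the constant 3; with 5 in place of 4
     the remainder is nonzero and NULL_TREE is returned instead.  */
  return div_if_zero_remainder (EXACT_DIV_EXPR, twelve, four);
}
#endif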
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
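
/* For example, in a 32-bit signed type the only constant this rejects is
   the minimum, -2147483648: there VAL equals 1 << (prec - 1), so negation
   would not be representable in the same type.  */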
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      return build1 (NEGATE_EXPR, type, t);

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      return build1 (NEGATE_EXPR, type, t);

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 0));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 1));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
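
/* Usage sketch (editor's addition): folding 2 + 3 as integer constants.  */
#if 0
static tree
int_const_binop_demo (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  return int_const_binop (PLUS_EXPR, two, three, 0);  /* constant 5 */
}
#endif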
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
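
/* The complex cases above are just the textbook identities
     (a + bi) * (c + di) = (ac - bd) + (ad + bc)i
     (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d)
   evaluated with recursive const_binop calls on the parts.  */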
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
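
/* Worked example (editor's addition): with sizetype operands 4 and 12,
   ARG1 > ARG0, so the function computes 12 - 4 = 8 in the unsigned type,
   converts to ssizetype, and subtracts from zero, yielding -8 without
   ever relying on unsigned-to-signed overflow.  */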
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer.  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			      type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
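
/* Worked example (editor's addition): when NaNs are honored, the logical
   inverse of x < y is x UNGE y (unordered, greater, or equal), because a
   NaN operand makes x < y false and its inverse must then be true.  When
   honor_nans and flag_trapping_math are both set we refuse, since the
   inverted comparison could trap under different conditions.  */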
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}
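/* Worked example of the encoding: for identical operands,
   "a < b || a == b" combines COMPCODE_LT | COMPCODE_EQ, which by
   construction of the bit encoding equals COMPCODE_LE, so the pair
   folds to the single comparison "a <= b"; likewise "a < b && a == b"
   combines to COMPCODE_FALSE and folds to constant false.  */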
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! OP_SAME (0))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
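/* Example of the constant cases above: REAL_VALUES_IDENTICAL is used
   rather than a numeric comparison, so the REAL_CSTs 0.0 and -0.0 are
   not operand_equal_p even though they compare numerically equal, and
   two bit-identical NaN constants are equal even though NaN != NaN at
   run time.  This is exactly the distinction the comment before this
   function describes.  */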
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
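/* Example: for ARG = "(a < b) | (a == b)" this returns 1 with
   *CVAL1 = a and *CVAL2 = b, since only the two values a and b appear
   as comparison operands; "(a < b) | (a == c)" returns 0 because a
   third value, c, shows up.  */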
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
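/* Example: with OLD0 = a, NEW0 = x, OLD1 = b and NEW1 = y, eval_subst
   rewrites "(a < b) & (b == a)" into "(x < y) & (y == x)"; the
   substitution only happens at comparison operands, as described
   above.  */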
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
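/* Example: when fold turns "f () * 0" into 0, the call still has to
   happen, so omit_one_operand produces the equivalent of "(f (), 0)"
   via a COMPOUND_EXPR; if the omitted operand has no side effects the
   plain converted RESULT is returned instead.  */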
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);
        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                       VOID_TYPE_P (TREE_TYPE (arg1))
                       ? arg1 : invert_truthvalue (arg1),
                       VOID_TYPE_P (TREE_TYPE (arg2))
                       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
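/* Example: inverting "a && b" applies De Morgan's law and yields
   "!a || !b" with each negation simplified recursively, while
   inverting "x & 1" yields the comparison "(x & 1) == 0".  */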
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}
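/* Example: "(x | 3) & (x | 5)" shares the operand x, so it is
   rewritten as "x | (3 & 5)" and then constant-folded to "x | 1":
   one bitwise operation instead of three.  */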
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}
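/* Example: "a / 4.0 + a / 4.0" becomes "a * (0.25 + 0.25)", i.e.
   "a * 0.5".  As the comment above says, this is unsafe: the rewritten
   form can round differently and skips the intermediate quotients, so
   callers only apply it under unsafe-math semantics.  */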
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert (unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
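/* Worked example: for "struct { int f : 3; } s;" the test "s.f == 2"
   is rewritten into mask-and-compare form, conceptually
   "(containing_word & mask) == (2 << shift)", where containing_word,
   mask and shift stand for the nbitsize/nbitpos reference and the
   shifted constant computed above (illustrative names, not identifiers
   used in this file).  */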
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of its type.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
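/* Example: for an 8-bit EXP, sign_bit_p returns EXP when VAL is 0x80.
   The separate lo/hi words and masks exist only because an INTEGER_CST
   may span two HOST_WIDE_INTs when the precision exceeds
   HOST_BITS_PER_WIDE_INT.  */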
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
                                             fold_convert (arg0_type,
                                                           high_positive),
                                             fold_convert (arg0_type,
                                                           integer_one_node));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
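/* Example: starting from the initial "EXP != 0" range, the comparison
   "x > 10" refines to "- [-, 10]" (in_p == 0, no low bound, high bound
   10): being outside [minimum, 10] is exactly "x > 10".  Merging a
   later "x <= 20" then gives "+ [11, 20]".  */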
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
                        fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
                        fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
                        fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node));
        }
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
      && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          /* There is no requirement that LOW be within the range of ETYPE
             if the latter is a subtype.  It must, however, be within the base
             type of ETYPE.  So be sure we do the subtraction in that type.  */
          if (TREE_TYPE (etype))
            etype = TREE_TYPE (etype);
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    {
      /* There is no requirement that LOW be within the range of ETYPE
         if the latter is a subtype.  It must, however, be within the base
         type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
        {
          etype = TREE_TYPE (etype);
          exp = fold_convert (etype, exp);
          low = fold_convert (etype, low);
          value = fold_convert (etype, value);
        }

      return build_range_check (type,
                                fold_build2 (MINUS_EXPR, etype, exp, low),
                                1, build_int_cst (etype, 0), value);
    }

  return 0;
}
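/* Example: the range check "+ [2, 5]" on an int X is emitted as
   "(unsigned) (X - 2) <= 3"; the subtraction is done in the unsigned
   type, so values below 2 wrap to large numbers and a single comparison
   covers both bounds.  */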
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
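/* Example: merging "+ [2, 5]" with "+ [4, 9]" (an AND of two included
   ranges) finds overlap without subsumption, so the result runs from
   the start of the second range to the end of the first: "+ [4, 5]".  */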
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return fold_convert (type, integer_zero_node);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || strcmp (lang_hooks.name, "GNU C++") != 0
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
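
/* For instance (illustration only), "x < 5 ? x : 4" matches the
   C1 == C2 + 1 case above and folds to MIN (x, 4), while
   "x > 5 ? x : 6" matches C1 == C2 - 1 and folds to MAX (x, 6).  */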
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  return 0;
}
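
/* For example, "ch >= '0' && ch <= '9'" is turned by the range
   machinery above into a single range test, essentially
   (unsigned char) (ch - '0') <= 9, so only one branch is needed.  */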
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
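
/* Example (illustration only): for a signed 8-bit field in a 32-bit
   mode, p == 8, so unextend (0xffffffa5, 8, 0, 0) yields 0x000000a5
   while unextend (0x000000a5, 8, 0, 0) yields 0xffffffa5: the extra
   bits come out zero exactly when the input was sign-extended.  */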
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	return build2 (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (integer_nonzerop (const_binop (BIT_AND_EXPR, l_const,
					 fold_build1 (BIT_NOT_EXPR,
						      lntype, ll_mask),
					 0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (integer_nonzerop (const_binop (BIT_AND_EXPR, r_const,
					 fold_build1 (BIT_NOT_EXPR,
						      lntype, rl_mask),
					 0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
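
/* As an illustration, given two adjacent bit-fields a and b,
   "p->a == 2 && p->b == 4" is rewritten so that one wider bit-field
   reference spanning both is loaded once, masked, and compared against
   the single constant formed from 2 and 4 shifted into their
   respective field positions.  */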
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	/* FIXME: We should be able to invert code without building a
	   scratch tree node, but doing so would require us to
	   duplicate a part of invert_truthvalue here.  */
	tree tem = invert_truthvalue (build2 (code, type, op0, op1));
	tem = optimize_minmax_comparison (TREE_CODE (tem),
					  TREE_TYPE (tem),
					  TREE_OPERAND (tem, 0),
					  TREE_OPERAND (tem, 1));
	return invert_truthvalue (tem);
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
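
/* Worked example: "MAX (x, 10) < 5" is inverted to "MAX (x, 10) >= 5",
   which the GE_EXPR case splits into (== 5) || (> 5); those fold to
   false and true respectively, so the original comparison folds to
   false, as expected since MAX (x, 10) is never below 10.  */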
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
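
/* For instance, extract_muldiv on t = x * 6 with c = 2 and
   code = TRUNC_DIV_EXPR hits the MULT_EXPR case: 6 % 2 == 0, so the
   division is folded away and x * 3 is returned (when signed overflow
   is undefined, i.e. without -fwrapv).  */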
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  tree idx = TREE_OPERAND (op0, 1);
	  *base = TREE_OPERAND (op0, 0);
	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
				 array_ref_element_size (op0));
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
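
/* Example: for "&a[3]" with 4-byte elements, *base is set to "a" and
   *offset to the byte offset 12 (3 * 4), computed from the index and
   array_ref_element_size.  */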
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
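
/* For example, "(x < y) + 1" (with ARG the constant 1) becomes
   "(x < y) ? 2 : 1": the comparison supplies true/false values of
   1 and 0, and the addition is folded into each arm.  */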
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
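
/* Consequently "x + 0.0" may not be folded to "x" when signed zeros
   are honored (for x == -0.0 the sum is +0.0), while "x - 0.0" and
   "x + (-0.0)" may be, provided sign-dependent rounding need not be
   honored.  */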
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
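
/* For example, "sqrt (x) > 3.0" folds to "x > 9.0"; this is safe even
   when x is a NaN, since then sqrt (x) is also a NaN and both
   comparisons are false.  */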
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
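
/* For example, with doubles, "x < +Inf" folds to "x <= DBL_MAX" and
   "x >= +Inf" folds to "x > DBL_MAX".  */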
5945 /* Subroutine of fold() that optimizes comparisons of a division by
5946 a nonzero integer constant against an integer constant, i.e.
   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
                           TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
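
/* Worked example (an illustration added here, not from the original
   sources): for signed X, the comparison X / 4 == 2 is rewritten by
   fold_div_compare into the range check 8 <= X && X <= 11, i.e.
   build_range_check (type, X, 1, 8, 11), since those are exactly the
   values whose truncating division by 4 yields 2.  */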
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  RESULT_TYPE is the
   desired result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              fold_convert (stype, integer_zero_node));
        }
    }

  return NULL_TREE;
}
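
/* Worked example (illustration only): if X is a 32-bit int, the test
   (X & 0x80000000) != 0 masks exactly the sign bit, so it becomes
   X < 0, and (X & 0x80000000) == 0 becomes X >= 0, after converting
   X to the corresponding signed type.  */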
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
                             inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
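
/* Worked example (illustration only): (X & 8) != 0 is rewritten as
   ((X >> 3) & 1) in an intermediate type of the same mode, and
   (X & 8) == 0 as (((X >> 3) ^ 1) & 1); the trailing AND is kept last
   so it can combine with surrounding code.  */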
/* Check whether we are allowed to reorder operands ARG0 and ARG1,
   such that the evaluation of ARG1 occurs before ARG0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return false;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return true;

  if (TREE_CODE (arg1) == REAL_CST)
    return false;
  if (TREE_CODE (arg0) == REAL_CST)
    return true;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return false;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return true;

  if (TREE_CONSTANT (arg1))
    return false;
  if (TREE_CONSTANT (arg0))
    return true;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return false;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return true;

  return false;
}
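
/* Worked example (illustration only): for the commutative expression
   5 + x, tree_swap_operands_p returns true, so fold_binary
   canonicalizes it to x + 5; later transformations then only need to
   look for constants in the second operand.  */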
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TYPE_PRECISION (shorter_type)
              > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
          || ((TYPE_PRECISION (shorter_type)
               == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
              && (TYPE_UNSIGNED (shorter_type)
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
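
/* Worked example (illustration only): if S has a 16-bit type, the
   widened comparison (int) S == 70000 can never hold because 70000 is
   outside the bounds computed by lower/upper_bound_in_type, so it
   folds to constant 0 while keeping S for its side effects.  */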
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
                                TREE_INT_CST_LOW (arg1),
                                TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
                             TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
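
/* Worked example (illustration only): for int x, the comparison
   (unsigned int) x == 5u changes only the signedness of x, so it is
   folded back to x == 5 with the constant rebuilt in the inner
   type.  */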
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
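
/* Worked example (illustration only): given int a[] (element size 4),
   the address &a[i] + j * 4 matches the array step, so it is rewritten
   as &a[i + j]; with MINUS_EXPR, &a[i] - j * 4 becomes &a[i - j].  */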
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
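
/* Worked example (illustration only): with BOUND = a < n and
   INEQ = a + 1 > i, the difference (a + 1) - a folds to 1, so the
   sharp inequality is safely weakened to a >= i; the bound a < n
   guarantees a + 1 cannot have wrapped around.  */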
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  if (op0)
    {
      arg0 = op0;
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operand.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && ((inter_int && final_int) || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec > inside_prec || inter_prec > final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }
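
      /* Worked example (illustration only): for unsigned char C, the
         sign-extension of a zero-extension (long long) (int) C is
         replaced by the single zero-extension (long long) C, because
         the intermediate (int) C can never be negative.  */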
      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, build_fold_addr_expr (base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
                                        TREE_INT_CST_HIGH (and1));
              tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
                                    TREE_CONSTANT_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }
      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = type;
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return NULL_TREE;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
        return fold_convert (type, negate_expr (arg0));
      /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return build2 (COMPLEX_EXPR, type,
                       TREE_OPERAND (arg0, 0),
                       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return build_complex (type, TREE_REALPART (arg0),
                              negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
        return TREE_OPERAND (arg0, 0);
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      return NULL_TREE;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        return NULL_TREE;
      return fold_convert (type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
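
/* Worked examples for fold_unary (illustration only): ~(-A) folds to
   A - 1, -(~A) folds to A + 1, and ABS_EXPR<ABS_EXPR<x>> collapses to
   ABS_EXPR<x>; when no rule applies the function returns NULL_TREE and
   the caller keeps the original expression.  */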
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2);

  arg0 = op0;
  arg1 = op1;

  if (arg0)
    {
      tree subop;

      /* Strip any conversions that don't change the mode.  This is
         safe for every expression, except for a comparison expression
         because its signedness is derived from its operands.  So, in
         the latter case, only strip conversions that don't change the
         signedness.

         Note that this is done as an internal manipulation within the
         constant folder, in order to find the simplest representation
         of the arguments so that their form can be studied.  In any
         cases, the appropriate type conversions should be put back in
         the tree that will get out of the constant folder.  */
      if (kind == tcc_comparison)
        STRIP_SIGN_NOPS (arg0);
      else
        STRIP_NOPS (arg0);

      if (TREE_CODE (arg0) == COMPLEX_CST)
        subop = TREE_REALPART (arg0);
      else
        subop = arg0;

      if (TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }

  if (arg1)
    {
      tree subop;

      /* Likewise strip conversions from the second operand, exactly
         as above.  */
      if (kind == tcc_comparison)
        STRIP_SIGN_NOPS (arg1);
      else
        STRIP_NOPS (arg1);

      if (TREE_CODE (arg1) == COMPLEX_CST)
        subop = TREE_REALPART (arg1);
      else
        subop = arg1;

      if (TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build2 (code, type,
                                    TREE_OPERAND (arg0, 1), op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold_build2 (code, type,
                                    op0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2 (pcode, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, parg0),
                                                 fold_convert (type, marg)),
                                    fold_convert (type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold_build2 (PLUS_EXPR, type,
                                    fold_convert (type, parg0),
                                    fold_build2 (pcode, type,
                                                 fold_convert (type, marg),
                                                 fold_convert (type,
                                                               parg1)));
            }
          if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
            {
              tree arg00, arg01, arg10, arg11;
              tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

              /* (A * C) + (B * C) -> (A+B) * C.
                 We are most concerned about the case where C is a constant,
                 but other combinations show up during loop reduction.  Since
                 it is not difficult, try all four possibilities.  */

              arg00 = TREE_OPERAND (arg0, 0);
              arg01 = TREE_OPERAND (arg0, 1);
              arg10 = TREE_OPERAND (arg1, 0);
              arg11 = TREE_OPERAND (arg1, 1);
              same = NULL_TREE;

              if (operand_equal_p (arg01, arg11, 0))
                same = arg01, alt0 = arg00, alt1 = arg10;
              else if (operand_equal_p (arg00, arg10, 0))
                same = arg00, alt0 = arg01, alt1 = arg11;
              else if (operand_equal_p (arg00, arg11, 0))
                same = arg00, alt0 = arg01, alt1 = arg10;
              else if (operand_equal_p (arg01, arg10, 0))
                same = arg01, alt0 = arg00, alt1 = arg11;

              /* No identical multiplicands; see if we can find a common
                 power-of-two factor in non-power-of-two multiplies.  This
                 can help in multi-dimensional array access.  */
              else if (TREE_CODE (arg01) == INTEGER_CST
                       && TREE_CODE (arg11) == INTEGER_CST
                       && TREE_INT_CST_HIGH (arg01) == 0
                       && TREE_INT_CST_HIGH (arg11) == 0)
                {
                  HOST_WIDE_INT int01, int11, tmp;
                  int01 = TREE_INT_CST_LOW (arg01);
                  int11 = TREE_INT_CST_LOW (arg11);

                  /* Move min of absolute values to int11.  */
                  if ((int01 >= 0 ? int01 : -int01)
                      < (int11 >= 0 ? int11 : -int11))
                    {
                      tmp = int01, int01 = int11, int11 = tmp;
                      alt0 = arg00, arg00 = arg10, arg10 = alt0;
                      alt0 = arg01, arg01 = arg11, arg11 = alt0;
                    }

                  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
                    {
                      alt0 = fold_build2 (MULT_EXPR, type, arg00,
                                          build_int_cst (NULL_TREE,
                                                         int01 / int11));
                      alt1 = arg10;
                      same = arg11;
                    }
                }

              if (same)
                return fold_build2 (MULT_EXPR, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, alt0),
                                                 fold_convert (type, alt1)),
                                    fold_convert (type, same));
            }
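
          /* Worked example (illustration only): i*n + j*n is factored
             into (i + j) * n, and i*12 + j*4, as produced by
             multi-dimensional array indexing, becomes (i*3 + j) * 4
             using the common power-of-two factor 4.  */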
          /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
             of the array.  Loop optimizer sometimes produce this type of
             expressions.  */
          if (TREE_CODE (arg0) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
              if (tem)
                return fold_convert (type, tem);
            }
          else if (TREE_CODE (arg1) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
              if (tem)
                return fold_convert (type, tem);
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2 (MINUS_EXPR, type,
                                    fold_convert (type, arg0),
                                    fold_convert (type, tem));
            }

          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2 (MULT_EXPR, type, arg0,
                                build_real (type, dconst2));

          /* Convert x*c+x into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold_build2 (MULT_EXPR, type, arg1,
                                  build_real (type, c));
            }

          /* Convert x+x*c into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold_build2 (MULT_EXPR, type, arg0,
                                  build_real (type, c));
            }

          /* Convert x*c1+x*c2 into x*(c1+c2).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              REAL_VALUE_TYPE c1, c2;

              c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
              return fold_build2 (MULT_EXPR, type,
                                  TREE_OPERAND (arg0, 0),
                                  build_real (type, c1));
            }
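
          /* Worked example (illustration only): under
             -funsafe-math-optimizations, x*2.0 + x*3.0 folds to x*5.0
             and x + x*3.0 to x*4.0; these are not done by default
             because they can change rounding and exception behavior.  */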
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
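
      /* Worked example (illustration only): for a 32-bit unsigned X,
         (X << 3) + (X >> 29) is recognized here and rebuilt as X
         rotated left by 3, and (X << n) + (X >> (32 - n)) as X rotated
         left by n.  */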
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -funsafe-math-optimizations.  */

      if (! wins
          && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* With undefined overflow we can only associate constants
             with one variable.  */
          if ((POINTER_TYPE_P (type)
               || (INTEGRAL_TYPE_P (type)
                   && !(TYPE_UNSIGNED (type) || flag_wrapv)))
              && var0 && var1)
            {
              tree tmp0 = var0;
              tree tmp1 = var1;

              if (TREE_CODE (tmp0) == NEGATE_EXPR)
                tmp0 = TREE_OPERAND (tmp0, 0);
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
                tmp1 = TREE_OPERAND (tmp1, 0);
              /* The only case we can still associate with two variables
                 is if they are the same, modulo negation.  */
              if (!operand_equal_p (tmp0, tmp1, 0))
                ok = false;
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

      if (wins)
        t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
        {
          /* The return value should always have
             the same type as the original expression.  */
          if (TREE_TYPE (t1) != type)
            t1 = fold_convert (type, t1);

          return t1;
        }
      return NULL_TREE;

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
                            TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, arg1);

      if (! FLOAT_TYPE_P (type))
        {
          if (! wins && integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 0)),
                                    arg0);
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 1)),
                                    arg0);
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }

      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
         of the array.  Loop optimizer sometimes produce this type of
         expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
          if (tem)
            return fold_convert (type, tem);
        }
      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      if (TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (arg1) == MULT_EXPR
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          /* (A * C) - (B * C) -> (A-B) * C.  */
          if (operand_equal_p (TREE_OPERAND (arg0, 1),
                               TREE_OPERAND (arg1, 1), 0))
            return fold_build2 (MULT_EXPR, type,
                                fold_build2 (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg0, 1));
          /* (A * C1) - (A * C2) -> A * (C1-C2).  */
          if (operand_equal_p (TREE_OPERAND (arg0, 0),
                               TREE_OPERAND (arg1, 0), 0))
            return fold_build2 (MULT_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                fold_build2 (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1)));
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, negate_expr (arg0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Transform x * -1 into -x.  */
          if (integer_all_onesp (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2 (LSHIFT_EXPR, type, arg0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2 (LSHIFT_EXPR, type, arg1,
                                TREE_OPERAND (arg0, 1));

          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0,
                                             fold_convert (type, arg1),
                                             code, NULL_TREE)))
            return fold_convert (type, tem);
        }
      else
        {
7878 /* Maybe fold x * 0 to 0. The expressions aren't the same
7879 when x is NaN, since x * 0 is also NaN. Nor are they the
7880 same in modes with signed zeros, since multiplying a
7881 negative value by 0 gives -0, not +0. */
7882 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
7883 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
7884 && real_zerop (arg1
))
7885 return omit_one_operand (type
, arg1
, arg0
);
7886 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7887 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7888 && real_onep (arg1
))
7889 return non_lvalue (fold_convert (type
, arg0
));
7891 /* Transform x * -1.0 into -x. */
7892 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7893 && real_minus_onep (arg1
))
7894 return fold_convert (type
, negate_expr (arg0
));
	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold_build2 (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold_build2 (MULT_EXPR, type, tem, tem);
		}
	    }
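	  /* Illustrative example (not in the original source): because
	     x*x == (-x)*(-x), the fold above rewrites (-y) * (-y) as
	     y * y, exposing the square to further simplification.  */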
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = fold_build2 (PLUS_EXPR, type,
					  TREE_VALUE (TREE_OPERAND (arg0, 1)),
					  TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (expfn, arglist);
		}
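	      /* Illustrative examples (not in the original source):
		 sqrt(x)*sqrt(y) folds to sqrt(x*y) and exp(x)*exp(y)
		 folds to exp(x+y), each saving one math-library call.
		 Both need -funsafe-math-optimizations since they can
		 change rounding and overflow behavior.  */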
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, arg,
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
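	      /* Worked example (illustrative, not from the original
		 source): pow (x, 2.0) * pow (x, 3.0) folds to
		 pow (x, 5.0), and pow (x, y) * pow (z, y) folds to
		 pow (x*z, y), by the usual exponent laws.  */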
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
	  int width = TYPE_PRECISION (type);
	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  hi2 = TREE_INT_CST_HIGH (arg1);
	  lo2 = TREE_INT_CST_LOW (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = (unsigned HOST_WIDE_INT) -1
		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
	      mlo = -1;
	    }
	  else
	    {
	      mhi = 0;
	      mlo = (unsigned HOST_WIDE_INT) -1
		    >> (HOST_BITS_PER_WIDE_INT - width);
	    }

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
	    return fold_build2 (BIT_IOR_EXPR, type,
				TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
	  if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
	    return fold_build2 (BIT_IOR_EXPR, type,
				fold_build2 (BIT_AND_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     build_int_cst_wide (type,
								 lo1 & ~lo2,
								 hi1 & ~hi2)),
				arg1);
	}

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
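      /* Worked example (illustrative, not from the original source):
	 for a 32-bit int, (x & 0x0f) | 0x05 drops the C1 bits already
	 covered by C2 and becomes (x & 0x0a) | 0x05, while
	 (x & 0x03) | 0x07 folds to (x, 0x07) since C1 & C2 == C1.  */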
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
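      /* Worked example (illustrative, not from the original source):
	 (x & 0x0c) ^ (y & 0x03) has disjoint masks, 0x0c & 0x03 == 0,
	 so XOR and IOR agree bit for bit and the expression is
	 re-folded as (x & 0x0c) | (y & 0x03).  */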
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}
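      /* Truth-table check (illustrative, not from the original source):
	 for each bit, (x|y) ^ x is 1 exactly when x == 0 and y == 1,
	 which is y & ~x; the four variants above are the same identity
	 with the operands commuted.  */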
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (BIT_IOR_EXPR, type,
			    fold_build2 (BIT_AND_EXPR, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (BIT_AND_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}
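      /* Worked example (illustrative, not from the original source):
	 with unsigned char c promoted to int, prec is 8 and the low
	 8 bits of 0377 are all set, so ((int) c & 0377) carries no
	 information beyond the conversion and folds to (int) c.  */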
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2 (MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2 (MULT_EXPR, type,
				      fold_convert (type, arg0), tem);
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2 (MULT_EXPR, type,
			    fold_build2 (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2 (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold_build2 (RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & C2", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
	{
	  unsigned HOST_WIDE_INT high, low;
	  tree mask;
	  int l;

	  l = tree_log2 (arg1);
	  if (l >= HOST_BITS_PER_WIDE_INT)
	    {
	      high = ((unsigned HOST_WIDE_INT) 1
		      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
	      low = -1;
	    }
	  else
	    {
	      high = 0;
	      low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
	    }

	  mask = build_int_cst_wide (type, low, high);
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0), mask);
	}
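      /* Worked example (illustrative, not from the original source):
	 for unsigned x, x % 8 folds to x & 7: tree_log2 (8) is 3, so
	 low becomes (1 << 3) - 1 == 7 and the remainder is just the
	 low three bits.  */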
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_CONSTANT_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !flag_trapv
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !flag_trapv)
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return build_int_cst (type, 0);
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			      build_int_cst (type, low));
	}
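      /* Worked example (illustrative, not from the original source):
	 for 32-bit unsigned x, (x >> 3) >> 5 combines to x >> 8, and
	 (x >> 20) >> 20 exceeds the precision, so the whole
	 expression folds to the constant 0.  */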
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (NULL_TREE,
				    GET_MODE_BITSIZE (TYPE_MODE (type)));
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
	}
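      /* Worked example (illustrative, not from the original source):
	 on a 32-bit mode, a rotate-left by 8 is rewritten as a
	 rotate-right by 32 - 8 == 24, so only one canonical rotate
	 form has to be handled from here on.  */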
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2 (code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2 (code, type, arg0, tem);
	}

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold_build2 (swap_tree_comparison (code), type, op1, op0);

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));
      /* If this is an equality comparison of the address of a non-weak
	 object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && integer_zerop (arg1))
	return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is a comparison of two exprs that look like an
	 ARRAY_REF of the same object, then we can fold this to a
	 comparison of the two offsets.  This is only safe for
	 EQ_EXPR and NE_EXPR because of overflow issues.  */
      if (code == EQ_EXPR || code == NE_EXPR)
	{
	  tree base0, offset0, base1, offset1;

	  if (extract_array_ref (arg0, &base0, &offset0)
	      && extract_array_ref (arg1, &base1, &offset1)
	      && operand_equal_p (base0, base1, 0))
	    {
	      /* Handle no offsets on both sides specially.  */
	      if (offset0 == NULL_TREE
		  && offset1 == NULL_TREE)
		return fold_build2 (code, type, integer_zero_node,
				    integer_zero_node);

	      if (!offset0 || !offset1
		  || TREE_TYPE (offset0) == TREE_TYPE (offset1))
		{
		  if (offset0 == NULL_TREE)
		    offset0 = build_int_cst (TREE_TYPE (offset1), 0);
		  if (offset1 == NULL_TREE)
		    offset1 = build_int_cst (TREE_TYPE (offset0), 0);
		  return fold_build2 (code, type, offset0, offset1);
		}
	    }
	}
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((code != EQ_EXPR && code != NE_EXPR)
	  && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
		  && !(flag_wrapv || flag_trapv))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);
	    }
	}
      /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && !TYPE_UNSIGNED (TREE_TYPE (arg1))
	      && !(flag_wrapv || flag_trapv))
	  && (TREE_CODE (arg1) == INTEGER_CST
	      && !TREE_OVERFLOW (arg1)))
	{
	  tree const1 = TREE_OPERAND (arg0, 1);
	  tree const2 = arg1;
	  tree variable = TREE_OPERAND (arg0, 0);
	  tree lhs;
	  int lhs_add;
	  lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

	  lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);
	  if (TREE_CODE (lhs) == TREE_CODE (arg1)
	      && (TREE_CODE (lhs) != INTEGER_CST
		  || !TREE_OVERFLOW (lhs)))
	    return fold_build2 (code, type, variable, lhs);
	}
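      /* Worked example (illustrative, not from the original source):
	 for signed x without -fwrapv, x + 2 < 5 moves the constant to
	 the right-hand side and becomes x < 3; the TREE_OVERFLOW check
	 rejects the rewrite when C2 -+ C1 itself overflows.  */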
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold_build2 (code, type, fold_convert (newtype, targ0),
				fold_convert (newtype, targ1));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold_build2 (swap_tree_comparison (code), type,
			       TREE_OPERAND (arg0, 0),
			       build_real (TREE_TYPE (arg1),
					   REAL_VALUE_NEGATE (cst)));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold_build2 (code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, tem, arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
	  /* This optimization is invalid for ordered comparisons
	     if CONST+INCR overflows or if foo+incr might overflow.
	     This optimization is invalid for floating point due to rounding.
	     For pointer types we assume overflow doesn't happen.  */
	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (code == EQ_EXPR || code == NE_EXPR))))
	{
	  tree varop, newconst;

	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	    {
	      newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1));
	      varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }
	  else
	    {
	      newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1));
	      varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }

	  /* If VAROP is a reference to a bitfield, we must mask
	     the constant by the width of the field.  */
	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
	      && host_integerp (DECL_SIZE (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)), 1))
	    {
	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	      HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
	      tree folded_compare, shift;

	      /* First check whether the comparison would come out
		 always the same.  If we don't do that we would
		 change the meaning with the masking.  */
	      folded_compare = fold_build2 (code, type,
					    TREE_OPERAND (varop, 0), arg1);
	      if (integer_zerop (folded_compare)
		  || integer_onep (folded_compare))
		return omit_one_operand (type, folded_compare, varop);

	      shift = build_int_cst (NULL_TREE,
				     TYPE_PRECISION (TREE_TYPE (varop)) - size);
	      shift = fold_convert (TREE_TYPE (varop), shift);
	      newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift);
	      newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift);
	    }

	  return fold_build2 (code, type, varop, newconst);
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));

	    default:
	      break;
	    }
	}
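      /* Worked example (illustrative, not from the original source):
	 x >= 10 becomes x > 9 and x < 10 becomes x <= 9; both are safe
	 because C > 0 guarantees that C - 1 cannot wrap around.  */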
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if (!in_gimple_form
		     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st, arg0),
					build_int_cst (st, 0));
		  }
	      }
	  }
      }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert (TREE_TYPE (arg0), arg1),
				      TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && (TREE_CODE (arg0) == NOP_EXPR
		   || TREE_CODE (arg0) == CONVERT_EXPR))
	{
	  /* If we are widening one operand of an integer comparison,
	     see if the other operand is similarly being widened.  Perhaps we
	     can do the comparison in the narrower type.  */
	  tem = fold_widened_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;

	  /* Or if we are changing signedness.  */
	  tem = fold_sign_changed_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tem = optimize_minmax_comparison (code, type, op0, op1);
	  if (tem)
	    return tem;
	}
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      else if (code == GE_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1)
		   || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		       && real_zerop (arg1))))
	return omit_one_operand (type, integer_one_node, arg0);

      /* Convert ABS_EXPR<x> < 0 to false.  */
      else if (code == LT_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == ABS_EXPR
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
				      arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
				      arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2 (code, type,
				  fold_convert (TREE_TYPE (arg1), tem), arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
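      /* E.g. for signed x, x % 4 == 0 becomes (unsigned) x % 4U == 0, which
	 expansion can implement as a simple mask test; for a power-of-two
	 divisor the two remainders agree in their comparison with zero.  */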
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));
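      /* E.g. (flags & 8) == 8 becomes (flags & 8) != 0, a plain bit test
	 rather than a compare against the mask value.  */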
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
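      /* E.g. with 32-bit int, (x & 0x80000000) != 0 folds to x < 0 and
	 (x & 0x80000000) == 0 folds to x >= 0.  */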
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}
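      /* E.g. (x & 12) == 1 can never hold, since 1 & ~12 != 0, so the
	 comparison folds to 0 (and the NE_EXPR form folds to 1).  */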
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TYPE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  fold_convert (TREE_TYPE (arg0), integer_zero_node));
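      /* E.g. for unsigned x, x < (1U << y) becomes (x >> y) == 0: both are
	 true exactly when x has no bits set at or above position y.  */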
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold_build2 (EQ_EXPR, type, arg0, arg1);

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      gcc_unreachable ();
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	      tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	      tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  if (save_p)
		    return save_expr (build2 (code, type, cval1, cval2));
		  return fold_build2 (code, type, cval1, cval2);
		}
	    }
	}
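      /* Worked example for the code above: ((x > y) - (y > x)) > 0 yields
	 1, 0 and 0 under the three substitutions, i.e. mask 4, and so
	 folds to the single comparison x > y.  */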
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && lang_hooks.can_use_bit_fields_p ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Fold a comparison of the address of COMPONENT_REFs with the same
	 type and component to a comparison of the address of the base
	 object.  In short, &x->a OP &y->a to x OP y and
	 &x->a OP &y.a to x OP &y  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
	{
	  tree cref0 = TREE_OPERAND (arg0, 0);
	  tree cref1 = TREE_OPERAND (arg1, 0);
	  if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	    {
	      tree op0 = TREE_OPERAND (cref0, 0);
	      tree op1 = TREE_OPERAND (cref1, 0);
	      return fold_build2 (code, type,
				  build_fold_addr_expr (op0),
				  build_fold_addr_expr (op1));
	    }
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    {
	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
	 into a single range test.  */
      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
	   || TREE_CODE (arg0) == EXACT_DIV_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !integer_zerop (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (arg1))
	{
	  t1 = fold_div_compare (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}
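      /* E.g. x / 4 == 2 with truncating division holds exactly for
	 8 <= x <= 11, so fold_div_compare can emit that range test.  */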
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}

      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? NULL_TREE : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}
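      /* E.g. UNORDERED_EXPR <NaN, x> is always 1 and ORDERED_EXPR <NaN, x>
	 is always 0; LTGT_EXPR is only folded here when it cannot trap.  */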
      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }
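      /* E.g. (double) f1 < (double) f2 with float f1 and f2 is decided
	 exactly in float, so the widening conversions can be stripped.  */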
      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be a null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR, otherwise return NULL_TREE.  Do not check the sub-tree
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
   accessible from outside the sub-tree.  Returns true if such a label
   is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
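      /* E.g. a COMPONENT_REF of a CONSTRUCTOR, such as reading field f from
	 a compound literal like ((struct s){1, 2}).f, yields the matching
	 initializer value directly.  */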
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away the operand which contains a label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = invert_truthvalue (arg0);
	  if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (2 * HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}
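      /* For instance, if A is 32-bit and the constant is the 64-bit value
	 0x80000000 (upper bits all 0), the AND must be done on (unsigned) A;
	 were it sign-extended to 0xffffffff80000000 (upper bits all 1),
	 signed A would be used.  Any other pattern above bit 31 defeats
	 the transformation, as the checks above require.  */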
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
      return NULL_TREE;

    case BIT_FIELD_REF:
      if (TREE_CODE (arg0) == VECTOR_CST
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
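/* Note for the BIT_FIELD_REF case above: reading one element of a constant
   vector, e.g. BIT_FIELD_REF <{1,2,3,4}, 32, 32> with 32-bit elements,
   walks the element list (idx = 32/32 = 1) and yields the constant 2.  */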
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
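/* Usage sketch (hypothetical caller, for illustration only): a front end
   folding the static initializer  double d = 1.0 / 3.0;  can call
   fold_build2_initializer (RDIV_EXPR, ...) so that -ftrapping-math and
   -frounding-math do not block constant evaluation of the division.  */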
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
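/* E.g. multiple_of_p (type, J * 8 + 16, 8) returns 1: the MULT_EXPR needs
   only one operand (8) to be a multiple of 8, the constant 16 is one as
   well, and PLUS_EXPR requires it of both operands.  */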
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return 0;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
	return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;
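      /* E.g. (int) (unsigned char) a + (int) (unsigned char) b is at most
	 255 + 255 = 510, which needs only 9 bits, so the 32-bit sum is
	 known non-negative.  */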
    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their widths is less than the width
	 of the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	return 0;
      }
    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

	    CASE_BUILTIN_F (BUILT_IN_ACOS)
	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
	    CASE_BUILTIN_F (BUILT_IN_CABS)
	    CASE_BUILTIN_F (BUILT_IN_COSH)
	    CASE_BUILTIN_F (BUILT_IN_ERFC)
	    CASE_BUILTIN_F (BUILT_IN_EXP)
	    CASE_BUILTIN_F (BUILT_IN_EXP10)
	    CASE_BUILTIN_F (BUILT_IN_EXP2)
	    CASE_BUILTIN_F (BUILT_IN_FABS)
	    CASE_BUILTIN_F (BUILT_IN_FDIM)
	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
	    CASE_BUILTIN_F (BUILT_IN_POW10)
	    CASE_BUILTIN_I (BUILT_IN_FFS)
	    CASE_BUILTIN_I (BUILT_IN_PARITY)
	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	    CASE_BUILTIN_F (BUILT_IN_SQRT)
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_FREXP)
	    CASE_BUILTIN_F (BUILT_IN_LCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
	 if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of the operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
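/* E.g. &local_var is known nonzero, but &weak_symbol is not provably so,
   since an undefined weak symbol may resolve to address 0.  */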
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL_TREE;
}
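/* E.g. the read "abc"[1] satisfies all the checks above and folds to the
   character constant 'b'.  */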
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
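/* Note: negating the most negative integer constant overflows; neg_double
   reports this, and force_fit_type then sets TREE_OVERFLOW on the result
   for signed types rather than yielding a silently wrong value.  */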
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
			  ~ TREE_INT_CST_LOW (arg0),
			  ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
		      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
/* Build an expression for a cleanup point containing EXPR, with type
   TYPE.  Don't build a CLEANUP_POINT_EXPR if EXPR has no side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the modify expression inside
     the return, has side effects.  If either does not, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't
     check the left-hand side of the modify because it should always be
     the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}
tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p */
      if (type == optype)
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
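/* Illustrative sketch, not part of the original file: for a
   declaration like `int a[4];', folding the C expression *(int *)&a
   takes the ARRAY_TYPE branch above, since the pointed-to type of
   &a is int[4] and its element type matches int.  The result is the
   tree for a[0], built with build4 (ARRAY_REF, ...), instead of an
   INDIRECT_REF of a cast.  */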
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;

  return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;

  return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
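/* Illustrative sketch, not part of the original file: with a
   power-of-two divisor the fold avoids division entirely.  E.g.

     tree up = round_up (size_int (13), 8);

   builds (13 + 7) & -8, which folds to 16, rather than a
   CEIL_DIV_EXPR/MULT_EXPR pair.  */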
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
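/* Illustrative sketch, not part of the original file: rounding down
   to a power of two needs only the mask step, so

     tree down = round_down (size_int (13), 8);

   builds 13 & -8, which folds to 8.  */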
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
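/* Illustrative sketch, not part of the original file: given an
   ADDR_EXPR for &s.f, where field f sits 4 bytes into s, a
   hypothetical caller

     HOST_WIDE_INT bitpos;
     tree offset;
     tree core = split_address_to_core_and_offset (addr, &bitpos, &offset);

   gets &s back as the core, bitpos set to 32 (the offset is in bits,
   not bytes) and offset set to any variable part, or NULL_TREE.  */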
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
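/* Illustrative sketch, not part of the original file: for an array
   `int a[10];', the addresses &a[3] and &a[1] share the core &a and
   differ only by constant bit positions, so a hypothetical

     HOST_WIDE_INT d;
     bool ok = ptr_difference_const (addr3, addr1, &d);

   succeeds with d == 8 on a target with 32-bit int, i.e. two
   elements times BITS_PER_UNIT-sized bytes.  */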
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
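/* Illustrative sketch, not part of the original file: when only the
   magnitude of a product matters, the recursion above strips sign
   operations from the operands.  For an expression tree of the form
   NEGATE_EXPR (x) * ABS_EXPR (y), the NEGATE_EXPR and ABS_EXPR cases
   each yield their bare operand, and the MULT_EXPR case rebuilds the
   product as x * y.  The caller must know the result's sign is not
   significant, e.g. because the product feeds an ABS_EXPR.  */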