/* Support routines for vrange storage.
   Copyright (C) 2022-2025 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "gimple-range.h"
#include "value-range-storage.h"
33 // Generic memory allocator to share one interface between GC and
34 // obstack allocators.
36 class vrange_internal_alloc
39 vrange_internal_alloc () { }
40 virtual ~vrange_internal_alloc () { }
41 virtual void *alloc (size_t size
) = 0;
42 virtual void free (void *) = 0;
44 DISABLE_COPY_AND_ASSIGN (vrange_internal_alloc
);
47 class vrange_obstack_alloc final
: public vrange_internal_alloc
50 vrange_obstack_alloc ()
52 obstack_init (&m_obstack
);
54 virtual ~vrange_obstack_alloc () final override
56 obstack_free (&m_obstack
, NULL
);
58 virtual void *alloc (size_t size
) final override
60 return obstack_alloc (&m_obstack
, size
);
62 virtual void free (void *) final override
{ }
67 class vrange_ggc_alloc final
: public vrange_internal_alloc
70 vrange_ggc_alloc () { }
71 virtual ~vrange_ggc_alloc () final override
{ }
72 virtual void *alloc (size_t size
) final override
74 return ggc_internal_alloc (size
);
76 virtual void free (void *p
) final override
82 vrange_allocator::vrange_allocator (bool gc
)
85 m_alloc
= new vrange_ggc_alloc
;
87 m_alloc
= new vrange_obstack_alloc
;
90 vrange_allocator::~vrange_allocator ()
96 vrange_allocator::alloc (size_t size
)
98 return m_alloc
->alloc (size
);
102 vrange_allocator::free (void *p
)
107 // Allocate a new vrange_storage object initialized to R and return
111 vrange_allocator::clone (const vrange
&r
)
113 return vrange_storage::alloc (*m_alloc
, r
);
117 vrange_allocator::clone_varying (tree type
)
119 if (irange::supports_p (type
))
120 return irange_storage::alloc (*m_alloc
, int_range
<1> (type
));
121 if (prange::supports_p (type
))
122 return prange_storage::alloc (*m_alloc
, prange (type
));
123 if (frange::supports_p (type
))
124 return frange_storage::alloc (*m_alloc
, frange (type
));
129 vrange_allocator::clone_undefined (tree type
)
131 if (irange::supports_p (type
))
132 return irange_storage::alloc (*m_alloc
, int_range
<1> ());
133 if (prange::supports_p (type
))
134 return prange_storage::alloc (*m_alloc
, prange ());
135 if (frange::supports_p (type
))
136 return frange_storage::alloc (*m_alloc
, frange ());
140 // Allocate a new vrange_storage object initialized to R and return
141 // it. Return NULL if R is unsupported.
144 vrange_storage::alloc (vrange_internal_alloc
&allocator
, const vrange
&r
)
146 if (is_a
<irange
> (r
))
147 return irange_storage::alloc (allocator
, as_a
<irange
> (r
));
148 if (is_a
<prange
> (r
))
149 return prange_storage::alloc (allocator
, as_a
<prange
> (r
));
150 if (is_a
<frange
> (r
))
151 return frange_storage::alloc (allocator
, as_a
<frange
> (r
));
158 vrange_storage::set_vrange (const vrange
&r
)
160 if (is_a
<irange
> (r
))
162 irange_storage
*s
= static_cast <irange_storage
*> (this);
163 gcc_checking_assert (s
->fits_p (as_a
<irange
> (r
)));
164 s
->set_irange (as_a
<irange
> (r
));
166 else if (is_a
<prange
> (r
))
168 prange_storage
*s
= static_cast <prange_storage
*> (this);
169 gcc_checking_assert (s
->fits_p (as_a
<prange
> (r
)));
170 s
->set_prange (as_a
<prange
> (r
));
172 else if (is_a
<frange
> (r
))
174 frange_storage
*s
= static_cast <frange_storage
*> (this);
175 gcc_checking_assert (s
->fits_p (as_a
<frange
> (r
)));
176 s
->set_frange (as_a
<frange
> (r
));
181 // Verify that reading back from the cache didn't drop bits.
183 // FIXME: Avoid checking frange, as it currently pessimizes some ranges:
185 // gfortran.dg/pr49472.f90 pessimizes [0.0, 1.0] into [-0.0, 1.0].
186 && !is_a
<frange
> (r
)
187 && !r
.undefined_p ())
190 get_vrange (tmp
, r
.type ());
191 gcc_checking_assert (tmp
== r
);
195 // Restore R from storage.
198 vrange_storage::get_vrange (vrange
&r
, tree type
) const
200 if (is_a
<irange
> (r
))
202 const irange_storage
*s
= static_cast <const irange_storage
*> (this);
203 s
->get_irange (as_a
<irange
> (r
), type
);
205 else if (is_a
<prange
> (r
))
207 const prange_storage
*s
= static_cast <const prange_storage
*> (this);
208 s
->get_prange (as_a
<prange
> (r
), type
);
210 else if (is_a
<frange
> (r
))
212 const frange_storage
*s
= static_cast <const frange_storage
*> (this);
213 s
->get_frange (as_a
<frange
> (r
), type
);
219 // Return TRUE if storage can fit R.
222 vrange_storage::fits_p (const vrange
&r
) const
224 if (is_a
<irange
> (r
))
226 const irange_storage
*s
= static_cast <const irange_storage
*> (this);
227 return s
->fits_p (as_a
<irange
> (r
));
229 if (is_a
<prange
> (r
))
231 const prange_storage
*s
= static_cast <const prange_storage
*> (this);
232 return s
->fits_p (as_a
<prange
> (r
));
234 if (is_a
<frange
> (r
))
236 const frange_storage
*s
= static_cast <const frange_storage
*> (this);
237 return s
->fits_p (as_a
<frange
> (r
));
243 // Return TRUE if the range in storage is equal to R. It is the
244 // caller's responsibility to verify that the type of the range in
245 // storage matches that of R.
248 vrange_storage::equal_p (const vrange
&r
) const
250 if (is_a
<irange
> (r
))
252 const irange_storage
*s
= static_cast <const irange_storage
*> (this);
253 return s
->equal_p (as_a
<irange
> (r
));
255 if (is_a
<prange
> (r
))
257 const prange_storage
*s
= static_cast <const prange_storage
*> (this);
258 return s
->equal_p (as_a
<prange
> (r
));
260 if (is_a
<frange
> (r
))
262 const frange_storage
*s
= static_cast <const frange_storage
*> (this);
263 return s
->equal_p (as_a
<frange
> (r
));
268 //============================================================================
269 // irange_storage implementation
270 //============================================================================
273 irange_storage::write_lengths_address ()
275 return (unsigned short *)&m_val
[(m_num_ranges
* 2 + 2)
276 * WIDE_INT_MAX_HWIS (m_precision
)];
279 const unsigned short *
280 irange_storage::lengths_address () const
282 return const_cast <irange_storage
*> (this)->write_lengths_address ();
285 // Allocate a new irange_storage object initialized to R.
288 irange_storage::alloc (vrange_internal_alloc
&allocator
, const irange
&r
)
290 size_t size
= irange_storage::size (r
);
291 irange_storage
*p
= static_cast <irange_storage
*> (allocator
.alloc (size
));
292 new (p
) irange_storage (r
);
296 // Initialize the storage with R.
298 irange_storage::irange_storage (const irange
&r
)
299 : m_max_ranges (r
.num_pairs ())
301 m_num_ranges
= m_max_ranges
;
306 write_wide_int (HOST_WIDE_INT
*&val
, unsigned short *&len
, const wide_int
&w
)
309 for (unsigned i
= 0; i
< *len
; ++i
)
314 // Store R into the current storage.
317 irange_storage::set_irange (const irange
&r
)
319 gcc_checking_assert (fits_p (r
));
321 if (r
.undefined_p ())
323 m_kind
= VR_UNDEFINED
;
332 m_precision
= TYPE_PRECISION (r
.type ());
333 m_num_ranges
= r
.num_pairs ();
336 HOST_WIDE_INT
*val
= &m_val
[0];
337 unsigned short *len
= write_lengths_address ();
339 for (unsigned i
= 0; i
< r
.num_pairs (); ++i
)
341 write_wide_int (val
, len
, r
.lower_bound (i
));
342 write_wide_int (val
, len
, r
.upper_bound (i
));
345 // TODO: We could avoid streaming out the value if the mask is -1.
346 irange_bitmask bm
= r
.m_bitmask
;
347 write_wide_int (val
, len
, bm
.value ());
348 write_wide_int (val
, len
, bm
.mask ());
352 read_wide_int (wide_int
&w
,
353 const HOST_WIDE_INT
*val
, unsigned short len
, unsigned prec
)
355 trailing_wide_int_storage
stow (prec
, &len
,
356 const_cast <HOST_WIDE_INT
*> (val
));
357 w
= trailing_wide_int (stow
);
360 // Restore a range of TYPE from storage into R.
363 irange_storage::get_irange (irange
&r
, tree type
) const
365 if (m_kind
== VR_UNDEFINED
)
370 if (m_kind
== VR_VARYING
)
372 r
.set_varying (type
);
376 gcc_checking_assert (TYPE_PRECISION (type
) == m_precision
);
377 const HOST_WIDE_INT
*val
= &m_val
[0];
378 const unsigned short *len
= lengths_address ();
380 // Handle the common case where R can fit the new range.
381 if (r
.m_max_ranges
>= m_num_ranges
)
384 r
.m_num_ranges
= m_num_ranges
;
386 for (unsigned i
= 0; i
< m_num_ranges
* 2; ++i
)
388 read_wide_int (r
.m_base
[i
], val
, *len
, m_precision
);
392 // Otherwise build the range piecewise.
396 for (unsigned i
= 0; i
< m_num_ranges
; ++i
)
399 read_wide_int (lb
, val
, *len
, m_precision
);
401 read_wide_int (ub
, val
, *len
, m_precision
);
403 int_range
<1> tmp (type
, lb
, ub
);
408 wide_int bits_value
, bits_mask
;
409 read_wide_int (bits_value
, val
, *len
, m_precision
);
411 read_wide_int (bits_mask
, val
, *len
, m_precision
);
412 r
.m_bitmask
= irange_bitmask (bits_value
, bits_mask
);
413 if (r
.m_kind
== VR_VARYING
)
421 irange_storage::equal_p (const irange
&r
) const
423 if (m_kind
== VR_UNDEFINED
|| r
.undefined_p ())
424 return m_kind
== r
.m_kind
;
425 if (m_kind
== VR_VARYING
|| r
.varying_p ())
426 return m_kind
== r
.m_kind
;
428 // ?? We could make this faster by doing the comparison in place,
429 // without going through get_irange.
431 get_irange (tmp
, r
.type ());
435 // Return the size in bytes to allocate storage that can hold R.
438 irange_storage::size (const irange
&r
)
440 if (r
.undefined_p ())
441 return sizeof (irange_storage
);
443 unsigned prec
= TYPE_PRECISION (r
.type ());
444 unsigned n
= r
.num_pairs () * 2 + 2;
445 unsigned hwi_size
= ((n
* WIDE_INT_MAX_HWIS (prec
) - 1)
446 * sizeof (HOST_WIDE_INT
));
447 unsigned len_size
= n
* sizeof (unsigned short);
448 return sizeof (irange_storage
) + hwi_size
+ len_size
;
451 // Return TRUE if R fits in the current storage.
454 irange_storage::fits_p (const irange
&r
) const
456 return m_max_ranges
>= r
.num_pairs ();
460 irange_storage::dump () const
462 fprintf (stderr
, "irange_storage (prec=%d, ranges=%d):\n",
463 m_precision
, m_num_ranges
);
465 if (m_num_ranges
== 0)
468 const HOST_WIDE_INT
*val
= &m_val
[0];
469 const unsigned short *len
= lengths_address ();
472 fprintf (stderr
, " lengths = [ ");
473 for (i
= 0; i
< m_num_ranges
* 2 + 2; ++i
)
474 fprintf (stderr
, "%d ", len
[i
]);
475 fprintf (stderr
, "]\n");
477 for (i
= 0; i
< m_num_ranges
; ++i
)
479 for (j
= 0; j
< *len
; ++j
)
480 fprintf (stderr
, " [PAIR %d] LB " HOST_WIDE_INT_PRINT_DEC
"\n", i
,
483 for (j
= 0; j
< *len
; ++j
)
484 fprintf (stderr
, " [PAIR %d] UB " HOST_WIDE_INT_PRINT_DEC
"\n", i
,
489 // Dump value/mask pair.
490 for (j
= 0; j
< *len
; ++j
)
491 fprintf (stderr
, " [VALUE] " HOST_WIDE_INT_PRINT_DEC
"\n", *val
++);
493 for (j
= 0; j
< *len
; ++j
)
494 fprintf (stderr
, " [MASK] " HOST_WIDE_INT_PRINT_DEC
"\n", *val
++);
498 debug (const irange_storage
&storage
)
501 fprintf (stderr
, "\n");
504 //============================================================================
505 // frange_storage implementation
506 //============================================================================
508 // Allocate a new frange_storage object initialized to R.
511 frange_storage::alloc (vrange_internal_alloc
&allocator
, const frange
&r
)
513 size_t size
= sizeof (frange_storage
);
514 frange_storage
*p
= static_cast <frange_storage
*> (allocator
.alloc (size
));
515 new (p
) frange_storage (r
);
520 frange_storage::set_frange (const frange
&r
)
522 gcc_checking_assert (fits_p (r
));
527 m_pos_nan
= r
.m_pos_nan
;
528 m_neg_nan
= r
.m_neg_nan
;
532 frange_storage::get_frange (frange
&r
, tree type
) const
534 gcc_checking_assert (r
.supports_type_p (type
));
536 // Handle explicit NANs.
537 if (m_kind
== VR_NAN
)
539 if (HONOR_NANS (type
))
541 if (m_pos_nan
&& m_neg_nan
)
544 r
.set_nan (type
, m_neg_nan
);
550 if (m_kind
== VR_UNDEFINED
)
556 // We use the constructor to create the new range instead of writing
557 // out the bits into the frange directly, because the global range
558 // being read may be being inlined into a function with different
559 // restrictions as when it was originally written. We want to make
560 // sure the resulting range is canonicalized correctly for the new
562 r
= frange (type
, m_min
, m_max
, m_kind
);
564 // The constructor will set the NAN bits for HONOR_NANS, but we must
565 // make sure to set the NAN sign if known.
566 if (HONOR_NANS (type
) && (m_pos_nan
^ m_neg_nan
) == 1)
567 r
.update_nan (m_neg_nan
);
568 else if (!m_pos_nan
&& !m_neg_nan
)
573 frange_storage::equal_p (const frange
&r
) const
575 if (r
.undefined_p ())
576 return m_kind
== VR_UNDEFINED
;
579 get_frange (tmp
, r
.type ());
584 frange_storage::fits_p (const frange
&) const
589 //============================================================================
590 // prange_storage implementation
591 //============================================================================
594 prange_storage::alloc (vrange_internal_alloc
&allocator
, const prange
&r
)
596 size_t size
= sizeof (prange_storage
);
597 if (!r
.undefined_p ())
599 unsigned prec
= TYPE_PRECISION (r
.type ());
600 size
+= trailing_wide_ints
<NINTS
>::extra_size (prec
);
602 prange_storage
*p
= static_cast <prange_storage
*> (allocator
.alloc (size
));
603 new (p
) prange_storage (r
);
607 // Initialize the storage with R.
609 prange_storage::prange_storage (const prange
&r
)
611 // It is the caller's responsibility to allocate enough space such
612 // that the precision fits.
613 if (r
.undefined_p ())
614 // Undefined ranges do not require any extra space for trailing
616 m_trailing_ints
.set_precision (0);
618 m_trailing_ints
.set_precision (TYPE_PRECISION (r
.type ()));
624 prange_storage::set_prange (const prange
&r
)
626 if (r
.undefined_p ())
627 m_kind
= VR_UNDEFINED
;
628 else if (r
.varying_p ())
633 set_low (r
.lower_bound ());
634 set_high (r
.upper_bound ());
635 irange_bitmask bm
= r
.m_bitmask
;
636 set_value (bm
.value ());
637 set_mask (bm
.mask ());
642 prange_storage::get_prange (prange
&r
, tree type
) const
644 gcc_checking_assert (r
.supports_type_p (type
));
646 if (m_kind
== VR_UNDEFINED
)
648 else if (m_kind
== VR_VARYING
)
649 r
.set_varying (type
);
652 gcc_checking_assert (m_kind
== VR_RANGE
);
653 gcc_checking_assert (TYPE_PRECISION (type
) == m_trailing_ints
.get_precision ());
656 r
.m_min
= get_low ();
657 r
.m_max
= get_high ();
658 r
.m_bitmask
= irange_bitmask (get_value (), get_mask ());
665 prange_storage::equal_p (const prange
&r
) const
667 if (r
.undefined_p ())
668 return m_kind
== VR_UNDEFINED
;
671 get_prange (tmp
, r
.type ());
676 prange_storage::fits_p (const prange
&r
) const
678 // Undefined ranges always fit, because they don't store anything in
679 // the trailing wide ints.
680 if (r
.undefined_p ())
683 return TYPE_PRECISION (r
.type ()) <= m_trailing_ints
.get_precision ();
687 static vrange_allocator
ggc_vrange_allocator (true);
689 vrange_storage
*ggc_alloc_vrange_storage (tree type
)
691 return ggc_vrange_allocator
.clone_varying (type
);
694 vrange_storage
*ggc_alloc_vrange_storage (const vrange
&r
)
696 return ggc_vrange_allocator
.clone (r
);