/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "wtf/Alignment.h"
#include "wtf/ConditionalDestructor.h"
#include "wtf/ContainerAnnotations.h"
#include "wtf/DefaultAllocator.h"
#include "wtf/FastAllocBase.h"
#include "wtf/Noncopyable.h"
#include "wtf/NotFound.h"
#include "wtf/StdLibExtras.h"
#include "wtf/VectorTraits.h"
#include <algorithm>
#include <iterator>
#include <string.h>
#include <utility>

namespace WTF {
// For ASAN builds, disable inline buffers completely as they cause various issues.
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
#define INLINE_CAPACITY 0
#else
#define INLINE_CAPACITY inlineCapacity
#endif

#if defined(MEMORY_SANITIZER_INITIAL_SIZE)
static const size_t kInitialVectorSize = 1;
#else
#ifndef WTF_VECTOR_INITIAL_SIZE
#define WTF_VECTOR_INITIAL_SIZE 4
#endif
static const size_t kInitialVectorSize = WTF_VECTOR_INITIAL_SIZE;
#endif
template<typename T, size_t inlineBuffer, typename Allocator>
class Deque;

template <bool needsDestruction, typename T>
struct VectorDestructor;

template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T();
    }
};
template <bool unusedSlotsMustBeZeroed, typename T>
struct VectorUnusedSlotClearer;

template<typename T>
struct VectorUnusedSlotClearer<false, T> {
    static void clear(T*, T*) { }
#if ENABLE(ASSERT)
    static void checkCleared(const T*, const T*) { }
#endif
};

template<typename T>
struct VectorUnusedSlotClearer<true, T> {
    static void clear(T* begin, T* end)
    {
        memset(reinterpret_cast<void*>(begin), 0, sizeof(T) * (end - begin));
    }

#if ENABLE(ASSERT)
    static void checkCleared(const T* begin, const T* end)
    {
        const unsigned char* unusedArea = reinterpret_cast<const unsigned char*>(begin);
        const unsigned char* endAddress = reinterpret_cast<const unsigned char*>(end);
        ASSERT(endAddress >= unusedArea);
        for (int i = 0; i < endAddress - unusedArea; ++i)
            ASSERT(!unusedArea[i]);
    }
#endif
};
template <bool canInitializeWithMemset, typename T>
struct VectorInitializer;

template<typename T>
struct VectorInitializer<false, T>
{
    static void initialize(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            new (NotNull, cur) T;
    }
};

template<typename T>
struct VectorInitializer<true, T>
{
    static void initialize(T* begin, T* end)
    {
        memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
    }
};
template <bool canMoveWithMemcpy, typename T>
struct VectorMover;

template<typename T>
struct VectorMover<false, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            src->~T();
            ++dst;
            ++src;
        }
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (src > dst) {
            move(src, srcEnd, dst);
        } else {
            T* dstEnd = dst + (srcEnd - src);
            while (src != srcEnd) {
                --srcEnd;
                --dstEnd;
                new (NotNull, dstEnd) T(*srcEnd);
                srcEnd->~T();
            }
        }
    }
    static void swap(T* src, T* srcEnd, T* dst)
    {
        std::swap_ranges(src, srcEnd, dst);
    }
};

template<typename T>
struct VectorMover<true, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        if (LIKELY(dst && src))
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (LIKELY(dst && src))
            memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void swap(T* src, T* srcEnd, T* dst)
    {
        std::swap_ranges(reinterpret_cast<char*>(src), reinterpret_cast<char*>(srcEnd), reinterpret_cast<char*>(dst));
    }
};
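// Illustrative note (not from the original source): VectorTraits decides which
// specialization above is selected. For example, the traits in
// wtf/VectorTraits.h mark RefPtr<T> as movable with memcpy, so relocating a
// Vector<RefPtr<T>> backing store copies the raw pointer bits and skips the
// per-element destructor, avoiding refcount churn during reallocation.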
template <bool canCopyWithMemcpy, typename T>
struct VectorCopier;

template<typename T>
struct VectorCopier<false, T>
{
    template<typename U>
    static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            ++dst;
            ++src;
        }
    }
};

template<typename T>
struct VectorCopier<true, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        if (LIKELY(dst && src))
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    template<typename U>
    static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
    {
        VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst);
    }
};
template <bool canFillWithMemset, typename T>
struct VectorFiller;

template<typename T>
struct VectorFiller<false, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        while (dst != dstEnd) {
            new (NotNull, dst) T(val);
            ++dst;
        }
    }
};

template<typename T>
struct VectorFiller<true, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        static_assert(sizeof(T) == sizeof(char), "size of type should be one");
#if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
        if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
#endif
            memset(dst, val, dstEnd - dst);
    }
};
template<bool canCompareWithMemcmp, typename T>
struct VectorComparer;

template<typename T>
struct VectorComparer<false, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        ASSERT(a);
        ASSERT(b);
        return std::equal(a, a + size, b);
    }
};

template<typename T>
struct VectorComparer<true, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        ASSERT(a);
        ASSERT(b);
        return memcmp(a, b, sizeof(T) * size) == 0;
    }
};
template<typename T>
struct VectorTypeOperations
{
    static void destruct(T* begin, T* end)
    {
        VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
    }

    static void initialize(T* begin, T* end)
    {
        VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
    }

    static void move(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
    }

    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
    }

    static void swap(T* src, T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, dst);
    }

    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
    }

    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
    }

    static bool compare(const T* a, const T* b, size_t size)
    {
        return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
    }
};
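// Example (illustrative; the Point type is hypothetical): a plain aggregate
// can opt in to the memset/memcpy fast paths above by specializing
// VectorTraits, e.g. via SimpleClassVectorTraits from wtf/VectorTraits.h:
//
//     struct Point { int x, y; };
//     namespace WTF {
//     template<> struct VectorTraits<Point> : SimpleClassVectorTraits<Point> { };
//     }
//
// With such a specialization, VectorTypeOperations<Point>::move compiles down
// to a single memcpy instead of an element-by-element copy-and-destruct loop.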
template<typename T, bool hasInlineCapacity, typename Allocator>
class VectorBufferBase {
    WTF_MAKE_NONCOPYABLE(VectorBufferBase);
public:
    void allocateBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity);
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (hasInlineCapacity)
            m_buffer = Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate);
        else
            m_buffer = Allocator::template allocateVectorBacking<T>(sizeToAllocate);
        m_capacity = sizeToAllocate / sizeof(T);
    }

    void allocateExpandedBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity);
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (hasInlineCapacity)
            m_buffer = Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate);
        else
            m_buffer = Allocator::template allocateExpandedVectorBacking<T>(sizeToAllocate);
        m_capacity = sizeToAllocate / sizeof(T);
    }

    size_t allocationSize(size_t capacity) const
    {
        return Allocator::template quantizedSize<T>(capacity);
    }

    T* buffer() { return m_buffer; }
    const T* buffer() const { return m_buffer; }
    size_t capacity() const { return m_capacity; }

    void clearUnusedSlots(T* from, T* to)
    {
        // If the vector backing is garbage-collected and needs tracing
        // or finalizing, we clear out the unused slots so that the visitor
        // or the finalizer does not cause a problem when visiting the
        // unused slots.
        VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T>>::value), T>::clear(from, to);
    }

    void checkUnusedSlots(const T* from, const T* to)
    {
#if ENABLE(ASSERT) && !defined(ANNOTATE_CONTIGUOUS_CONTAINER)
        VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T>>::value), T>::checkCleared(from, to);
#endif
    }

protected:
    VectorBufferBase()
        : m_buffer(nullptr)
        , m_capacity(0)
    {
    }

    VectorBufferBase(T* buffer, size_t capacity)
        : m_buffer(buffer)
        , m_capacity(capacity)
    {
    }

    T* m_buffer;
    unsigned m_capacity;
    unsigned m_size;
};
template<typename T, size_t inlineCapacity, typename Allocator = DefaultAllocator>
class VectorBuffer;
template<typename T, typename Allocator>
class VectorBuffer<T, 0, Allocator> : protected VectorBufferBase<T, false, Allocator> {
public:
    typedef VectorBufferBase<T, false, Allocator> Base;

    VectorBuffer()
    {
    }

    VectorBuffer(size_t capacity)
    {
        // Calling malloc(0) might take a lock and may actually do an
        // allocation on some systems.
        if (capacity)
            allocateBuffer(capacity);
    }

    void destruct()
    {
        deallocateBuffer(m_buffer);
        m_buffer = nullptr;
    }

    void deallocateBuffer(T* bufferToDeallocate)
    {
        Allocator::freeVectorBacking(bufferToDeallocate);
    }

    bool expandBuffer(size_t newCapacity)
    {
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (Allocator::expandVectorBacking(m_buffer, sizeToAllocate)) {
            m_capacity = sizeToAllocate / sizeof(T);
            return true;
        }
        return false;
    }

    inline bool shrinkBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity < capacity());
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (Allocator::shrinkVectorBacking(m_buffer, allocationSize(capacity()), sizeToAllocate)) {
            m_capacity = sizeToAllocate / sizeof(T);
            return true;
        }
        return false;
    }

    void resetBufferPointer()
    {
        m_buffer = nullptr;
        m_capacity = 0;
    }

    void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other)
    {
        std::swap(m_buffer, other.m_buffer);
        std::swap(m_capacity, other.m_capacity);
    }

    using Base::allocateBuffer;
    using Base::allocationSize;

    using Base::buffer;
    using Base::capacity;

    using Base::clearUnusedSlots;
    using Base::checkUnusedSlots;

    bool hasOutOfLineBuffer() const
    {
        // When inlineCapacity is 0 we have an out of line buffer if we have a buffer.
        return buffer();
    }

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;
};
template<typename T, size_t inlineCapacity, typename Allocator>
class VectorBuffer : protected VectorBufferBase<T, true, Allocator> {
    WTF_MAKE_NONCOPYABLE(VectorBuffer);
public:
    typedef VectorBufferBase<T, true, Allocator> Base;

    VectorBuffer()
        : Base(inlineBuffer(), inlineCapacity)
    {
    }

    VectorBuffer(size_t capacity)
        : Base(inlineBuffer(), inlineCapacity)
    {
        if (capacity > inlineCapacity)
            Base::allocateBuffer(capacity);
    }

    void destruct()
    {
        deallocateBuffer(m_buffer);
        m_buffer = nullptr;
    }

    NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate)
    {
        Allocator::freeInlineVectorBacking(bufferToDeallocate);
    }

    void deallocateBuffer(T* bufferToDeallocate)
    {
        if (UNLIKELY(bufferToDeallocate != inlineBuffer()))
            reallyDeallocateBuffer(bufferToDeallocate);
    }

    bool expandBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity > inlineCapacity);
        if (m_buffer == inlineBuffer())
            return false;

        size_t sizeToAllocate = allocationSize(newCapacity);
        if (Allocator::expandInlineVectorBacking(m_buffer, sizeToAllocate)) {
            m_capacity = sizeToAllocate / sizeof(T);
            return true;
        }
        return false;
    }

    inline bool shrinkBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity < capacity());
        if (newCapacity <= inlineCapacity) {
            // We need to switch to inlineBuffer. Vector::shrinkCapacity
            // will handle it.
            return false;
        }
        ASSERT(m_buffer != inlineBuffer());
        size_t newSize = allocationSize(newCapacity);
        if (!Allocator::shrinkInlineVectorBacking(m_buffer, allocationSize(capacity()), newSize))
            return false;
        m_capacity = newSize / sizeof(T);
        return true;
    }

    void resetBufferPointer()
    {
        m_buffer = inlineBuffer();
        m_capacity = inlineCapacity;
    }
    void allocateBuffer(size_t newCapacity)
    {
        // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
        if (newCapacity > inlineCapacity)
            Base::allocateBuffer(newCapacity);
        else
            resetBufferPointer();
    }

    void allocateExpandedBuffer(size_t newCapacity)
    {
        if (newCapacity > inlineCapacity)
            Base::allocateExpandedBuffer(newCapacity);
        else
            resetBufferPointer();
    }

    size_t allocationSize(size_t capacity) const
    {
        if (capacity <= inlineCapacity)
            return m_inlineBufferSize;
        return Base::allocationSize(capacity);
    }
    void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other)
    {
        typedef VectorTypeOperations<T> TypeOperations;

        if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
            ASSERT(m_capacity == other.m_capacity);
            if (m_size > other.m_size) {
                ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other.m_size, m_size);
                TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_size, other.inlineBuffer());
                TypeOperations::move(inlineBuffer() + other.m_size, inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
                Base::clearUnusedSlots(inlineBuffer() + other.m_size, inlineBuffer() + m_size);
                ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, other.m_size);
            } else {
                ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, other.m_size);
                TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
                TypeOperations::move(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size, inlineBuffer() + m_size);
                Base::clearUnusedSlots(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
                ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other.m_size, m_size);
            }
        } else if (buffer() == inlineBuffer()) {
            ANNOTATE_DELETE_BUFFER(m_buffer, inlineCapacity, m_size);
            m_buffer = other.m_buffer;
            other.m_buffer = other.inlineBuffer();
            ANNOTATE_NEW_BUFFER(other.m_buffer, inlineCapacity, m_size);
            TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
            Base::clearUnusedSlots(inlineBuffer(), inlineBuffer() + m_size);
            std::swap(m_capacity, other.m_capacity);
        } else if (other.buffer() == other.inlineBuffer()) {
            ANNOTATE_DELETE_BUFFER(other.m_buffer, inlineCapacity, other.m_size);
            other.m_buffer = m_buffer;
            m_buffer = inlineBuffer();
            ANNOTATE_NEW_BUFFER(m_buffer, inlineCapacity, other.m_size);
            TypeOperations::move(other.inlineBuffer(), other.inlineBuffer() + other.m_size, inlineBuffer());
            Base::clearUnusedSlots(other.inlineBuffer(), other.inlineBuffer() + other.m_size);
            std::swap(m_capacity, other.m_capacity);
        } else {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }
    }
    using Base::buffer;
    using Base::capacity;

    bool hasOutOfLineBuffer() const
    {
        return buffer() && buffer() != inlineBuffer();
    }

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;

    static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
    T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
    const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }

    AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
    template<typename U, size_t inlineBuffer, typename V>
    friend class Deque;
};
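// Usage sketch (illustrative): the inlineCapacity template argument reserves
// storage inside the object itself, so small vectors never touch the heap:
//
//     Vector<int, 16> samples;  // 16 slots live inline in the object
//     samples.append(42);       // no heap allocation
//     samples.grow(17);         // exceeding 16 elements moves to a heap buffer
//
// (Under ASAN builds, INLINE_CAPACITY is forced to 0 and every buffer is
// heap-allocated; see the #define near the top of this file.)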
template<typename T, size_t inlineCapacity = 0, typename Allocator = DefaultAllocator> // Heap-allocated vectors with no inlineCapacity never need a destructor.
class Vector : private VectorBuffer<T, INLINE_CAPACITY, Allocator>, public ConditionalDestructor<Vector<T, INLINE_CAPACITY, Allocator>, (INLINE_CAPACITY == 0) && Allocator::isGarbageCollected> {
    WTF_USE_ALLOCATOR(Vector, Allocator);
private:
    typedef VectorBuffer<T, INLINE_CAPACITY, Allocator> Base;
    typedef VectorTypeOperations<T> TypeOperations;

public:
    typedef T ValueType;

    typedef T* iterator;
    typedef const T* const_iterator;
    typedef std::reverse_iterator<iterator> reverse_iterator;
    typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

    Vector()
    {
        static_assert(!IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, "Cannot initialize with memset if there is a vtable");
        static_assert(Allocator::isGarbageCollected || !IsAllowOnlyInlineAllocation<T>::value || !NeedsTracing<T>::value, "Cannot put ALLOW_ONLY_INLINE_ALLOCATION objects that have trace methods into an off-heap Vector");
        ANNOTATE_NEW_BUFFER(begin(), capacity(), 0);
        m_size = 0;
    }
    explicit Vector(size_t size)
        : Base(size)
    {
        static_assert(!IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, "Cannot initialize with memset if there is a vtable");
        static_assert(Allocator::isGarbageCollected || !IsAllowOnlyInlineAllocation<T>::value || !NeedsTracing<T>::value, "Cannot put ALLOW_ONLY_INLINE_ALLOCATION objects that have trace methods into an off-heap Vector");
        ANNOTATE_NEW_BUFFER(begin(), capacity(), size);
        m_size = size;
        TypeOperations::initialize(begin(), end());
    }
    // Off-GC-heap vectors: Destructor should be called.
    // On-GC-heap vectors: Destructor should be called for inline buffers
    // (if any) but destructor shouldn't be called for vector backing since
    // it is managed by the traced GC heap.
    void finalize()
    {
        if (!INLINE_CAPACITY) {
            if (LIKELY(!Base::buffer()))
                return;
        }
        ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size);
        if (LIKELY(m_size) && !(Allocator::isGarbageCollected && this->hasOutOfLineBuffer())) {
            TypeOperations::destruct(begin(), end());
            m_size = 0; // Partial protection against use-after-free.
        }

        Base::destruct();
    }

    void finalizeGarbageCollectedObject()
    {
        finalize();
    }
    Vector(const Vector&);
    template<size_t otherCapacity>
    explicit Vector(const Vector<T, otherCapacity, Allocator>&);

    Vector& operator=(const Vector&);
    template<size_t otherCapacity>
    Vector& operator=(const Vector<T, otherCapacity, Allocator>&);

    Vector(Vector&&);
    Vector& operator=(Vector&&);
    size_t size() const { return m_size; }
    size_t capacity() const { return Base::capacity(); }
    bool isEmpty() const { return !size(); }

    T& at(size_t i)
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }
    const T& at(size_t i) const
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }

    T& operator[](size_t i) { return at(i); }
    const T& operator[](size_t i) const { return at(i); }

    T* data() { return Base::buffer(); }
    const T* data() const { return Base::buffer(); }

    iterator begin() { return data(); }
    iterator end() { return begin() + m_size; }
    const_iterator begin() const { return data(); }
    const_iterator end() const { return begin() + m_size; }

    reverse_iterator rbegin() { return reverse_iterator(end()); }
    reverse_iterator rend() { return reverse_iterator(begin()); }
    const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
    const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }

    T& first() { return at(0); }
    const T& first() const { return at(0); }
    T& last() { return at(size() - 1); }
    const T& last() const { return at(size() - 1); }

    template<typename U> bool contains(const U&) const;
    template<typename U> size_t find(const U&) const;
    template<typename U> size_t reverseFind(const U&) const;

    void shrink(size_t size);
    void grow(size_t size);
    void resize(size_t size);
    void reserveCapacity(size_t newCapacity);
    void reserveInitialCapacity(size_t initialCapacity);
    void shrinkToFit() { shrinkCapacity(size()); }
    void shrinkToReasonableCapacity()
    {
        if (size() * 2 < capacity())
            shrinkCapacity(size() + size() / 4 + 1);
    }
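    // For instance, a vector holding 10 elements in a 100-slot backing store
    // satisfies size() * 2 < capacity() and is shrunk to 10 + 10 / 4 + 1 = 13
    // slots, leaving a little slack for future appends.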
    void clear() { shrinkCapacity(0); }

    template<typename U> void append(const U*, size_t);
    template<typename U> void append(const U&);
    template<typename U> void uncheckedAppend(const U& val);
    template<typename U, size_t otherCapacity, typename V> void appendVector(const Vector<U, otherCapacity, V>&);

    template<typename U> void insert(size_t position, const U*, size_t);
    template<typename U> void insert(size_t position, const U&);
    template<typename U, size_t c, typename V> void insert(size_t position, const Vector<U, c, V>&);

    template<typename U> void prepend(const U*, size_t);
    template<typename U> void prepend(const U&);
    template<typename U, size_t c, typename V> void prepend(const Vector<U, c, V>&);

    void remove(size_t position);
    void remove(size_t position, size_t length);

    Vector(size_t size, const T& val)
        : Base(size)
    {
        ANNOTATE_NEW_BUFFER(begin(), capacity(), size);
        m_size = size;
        TypeOperations::uninitializedFill(begin(), end(), val);
    }

    void fill(const T&, size_t);
    void fill(const T& val) { fill(val, size()); }

    template<typename Iterator> void appendRange(Iterator start, Iterator end);

    void swap(Vector& other)
    {
        Base::swapVectorBuffer(other);
        std::swap(m_size, other.m_size);
    }

    void reverse();

    template<typename VisitorDispatcher> void trace(VisitorDispatcher);

private:
    void expandCapacity(size_t newMinCapacity);
    const T* expandCapacity(size_t newMinCapacity, const T*);
    template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
    void shrinkCapacity(size_t newCapacity);
    template<typename U> void appendSlowCase(const U&);

    using Base::m_size;
    using Base::buffer;
    using Base::capacity;
    using Base::swapVectorBuffer;
    using Base::allocateBuffer;
    using Base::allocationSize;
    using Base::clearUnusedSlots;
    using Base::checkUnusedSlots;
};
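// Basic usage sketch (illustrative):
//
//     Vector<int> values;
//     values.append(1);
//     values.append(2);
//     values.remove(0);                    // destructs 1, shifts 2 down
//     for (int v : values) { }             // iterators are raw pointers
//     size_t i = values.find(2);           // kNotFound if absent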
template<typename T, size_t inlineCapacity, typename Allocator>
Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other)
    : Base(other.capacity())
{
    ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size());
    m_size = other.size();
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}
template<typename T, size_t inlineCapacity, typename Allocator>
template<size_t otherCapacity>
Vector<T, inlineCapacity, Allocator>::Vector(const Vector<T, otherCapacity, Allocator>& other)
    : Base(other.capacity())
{
    ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size());
    m_size = other.size();
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}
template<typename T, size_t inlineCapacity, typename Allocator>
Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, inlineCapacity, Allocator>& other)
{
    if (UNLIKELY(&other == this))
        return *this;

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size());
    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}
inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }
template<typename T, size_t inlineCapacity, typename Allocator>
template<size_t otherCapacity>
Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, otherCapacity, Allocator>& other)
{
    // If the inline capacities match, we should call the more specific
    // template. If the inline capacities don't match, the two objects
    // shouldn't be allocated the same address.
    ASSERT(!typelessPointersAreEqual(&other, this));

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size());
    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}
, size_t inlineCapacity
, typename Allocator
>
886 Vector
<T
, inlineCapacity
, Allocator
>::Vector(Vector
<T
, inlineCapacity
, Allocator
>&& other
)
889 // It's a little weird to implement a move constructor using swap but this way we
890 // don't have to add a move constructor to VectorBuffer.
894 template<typename T
, size_t inlineCapacity
, typename Allocator
>
895 Vector
<T
, inlineCapacity
, Allocator
>& Vector
<T
, inlineCapacity
, Allocator
>::operator=(Vector
<T
, inlineCapacity
, Allocator
>&& other
)
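// Example (illustrative): because moves are implemented with swap(), moving
// from a Vector leaves the source in a valid, reusable state:
//
//     Vector<int> source;
//     source.append(1);
//     Vector<int> destination(std::move(source)); // steals the backing store
//     // source is now empty but still usable.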
template<typename T, size_t inlineCapacity, typename Allocator>
template<typename U>
bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const
{
    return find(value) != kNotFound;
}
template<typename T, size_t inlineCapacity, typename Allocator>
template<typename U>
size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const
{
    const T* b = begin();
    const T* e = end();
    for (const T* iter = b; iter < e; ++iter) {
        if (*iter == value)
            return iter - b;
    }
    return kNotFound;
}
template<typename T, size_t inlineCapacity, typename Allocator>
template<typename U>
size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const
{
    const T* b = begin();
    const T* iter = end();
    while (iter > b) {
        --iter;
        if (*iter == value)
            return iter - b;
    }
    return kNotFound;
}
, size_t inlineCapacity
, typename Allocator
>
936 void Vector
<T
, inlineCapacity
, Allocator
>::fill(const T
& val
, size_t newSize
)
938 if (size() > newSize
)
940 else if (newSize
> capacity()) {
942 reserveCapacity(newSize
);
946 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size
, newSize
);
947 std::fill(begin(), end(), val
);
948 TypeOperations::uninitializedFill(end(), begin() + newSize
, val
);
template<typename T, size_t inlineCapacity, typename Allocator>
template<typename Iterator>
void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, Iterator end)
{
    for (Iterator it = start; it != end; ++it)
        append(*it);
}
, size_t inlineCapacity
, typename Allocator
>
961 void Vector
<T
, inlineCapacity
, Allocator
>::expandCapacity(size_t newMinCapacity
)
963 size_t oldCapacity
= capacity();
964 size_t expandedCapacity
= oldCapacity
;
965 // We use a more aggressive expansion strategy for Vectors with inline storage.
966 // This is because they are more likely to be on the stack, so the risk of heap bloat is minimized.
967 // Furthermore, exceeding the inline capacity limit is not supposed to happen in the common case and may indicate a pathological condition or microbenchmark.
968 if (INLINE_CAPACITY
) {
969 expandedCapacity
*= 2;
970 // Check for integer overflow, which could happen in the 32-bit build.
971 RELEASE_ASSERT(expandedCapacity
> oldCapacity
);
973 // This cannot integer overflow.
974 // On 64-bit, the "expanded" integer is 32-bit, and any encroachment above 2^32 will fail allocation in allocateBuffer().
975 // On 32-bit, there's not enough address space to hold the old and new buffers.
976 // In addition, our underlying allocator is supposed to always fail on > (2^31 - 1) allocations.
977 expandedCapacity
+= (expandedCapacity
/ 4) + 1;
979 reserveCapacity(std::max(newMinCapacity
, std::max(static_cast<size_t>(kInitialVectorSize
), expandedCapacity
)));
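// Worked example: a vector with inline storage at capacity 8 doubles to 16,
// while a plain heap vector grows by about a quarter, 8 -> 8 + 8 / 4 + 1 = 11.
// Either result is then raised to at least newMinCapacity and
// kInitialVectorSize by the std::max calls above.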
template<typename T, size_t inlineCapacity, typename Allocator>
const T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, const T* ptr)
{
    if (ptr < begin() || ptr >= end()) {
        expandCapacity(newMinCapacity);
        return ptr;
    }
    size_t index = ptr - begin();
    expandCapacity(newMinCapacity);
    return begin() + index;
}
, size_t inlineCapacity
, typename Allocator
> template<typename U
>
995 inline U
* Vector
<T
, inlineCapacity
, Allocator
>::expandCapacity(size_t newMinCapacity
, U
* ptr
)
997 expandCapacity(newMinCapacity
);
1001 template<typename T
, size_t inlineCapacity
, typename Allocator
>
1002 inline void Vector
<T
, inlineCapacity
, Allocator
>::resize(size_t size
)
1004 if (size
<= m_size
) {
1005 TypeOperations::destruct(begin() + size
, end());
1006 clearUnusedSlots(begin() + size
, end());
1007 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size
, size
);
1009 if (size
> capacity())
1010 expandCapacity(size
);
1011 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size
, size
);
1012 TypeOperations::initialize(end(), begin() + size
);
1018 template<typename T
, size_t inlineCapacity
, typename Allocator
>
1019 void Vector
<T
, inlineCapacity
, Allocator
>::shrink(size_t size
)
1021 ASSERT(size
<= m_size
);
1022 TypeOperations::destruct(begin() + size
, end());
1023 clearUnusedSlots(begin() + size
, end());
1024 ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size
, size
);
template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::grow(size_t size)
{
    ASSERT(size >= m_size);
    if (size > capacity())
        expandCapacity(size);
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
    TypeOperations::initialize(end(), begin() + size);
    m_size = size;
}
template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity)
{
    if (UNLIKELY(newCapacity <= capacity()))
        return;
    T* oldBuffer = begin();
    if (!oldBuffer) {
        Base::allocateBuffer(newCapacity);
        return;
    }
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    size_t oldCapacity = capacity();
#endif
    // The Allocator::isGarbageCollected check is not needed.
    // The check is just a static hint for a compiler to indicate that
    // Base::expandBuffer returns false if Allocator is a DefaultAllocator.
    if (Allocator::isGarbageCollected && Base::expandBuffer(newCapacity)) {
        ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity());
        return;
    }
    T* oldEnd = end();
    Base::allocateExpandedBuffer(newCapacity);
    ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
    TypeOperations::move(oldBuffer, oldEnd, begin());
    clearUnusedSlots(oldBuffer, oldEnd);
    ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size);
    Base::deallocateBuffer(oldBuffer);
}
template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(size_t initialCapacity)
{
    ASSERT(!m_size);
    ASSERT(capacity() == INLINE_CAPACITY);
    if (initialCapacity > INLINE_CAPACITY) {
        ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size);
        Base::allocateBuffer(initialCapacity);
        ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
    }
}
template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity)
{
    if (newCapacity >= capacity())
        return;

    if (newCapacity < size())
        shrink(newCapacity);

    T* oldBuffer = begin();
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    size_t oldCapacity = capacity();
#endif
    if (newCapacity > 0) {
        if (Base::shrinkBuffer(newCapacity)) {
            ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity());
            return;
        }

        T* oldEnd = end();
        Base::allocateBuffer(newCapacity);
        if (begin() != oldBuffer) {
            ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
            TypeOperations::move(oldBuffer, oldEnd, begin());
            clearUnusedSlots(oldBuffer, oldEnd);
            ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size);
        }
    } else {
        Base::resetBufferPointer();
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
        if (oldBuffer != begin()) {
            ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
            ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size);
        }
#endif
    }

    Base::deallocateBuffer(oldBuffer);
}
// Templatizing these is better than just letting the conversion happen implicitly,
// because for instance it allows a PassRefPtr to be appended to a RefPtr vector
// without refcount thrash.
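// For example (illustrative): with Vector<RefPtr<Node>> nodes, appending a
// PassRefPtr<Node> constructs the new RefPtr slot directly from the argument,
// adopting the reference instead of incrementing and then decrementing it.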
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
void Vector<T, inlineCapacity, Allocator>::append(const U* data, size_t dataSize)
{
    ASSERT(Allocator::isAllocationAllowed());
    size_t newSize = m_size + dataSize;
    if (newSize > capacity()) {
        data = expandCapacity(newSize, data);
        ASSERT(begin());
    }
    RELEASE_ASSERT(newSize >= m_size);
    T* dest = end();
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
    VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], dest);
    m_size = newSize;
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(const U& val)
{
    ASSERT(Allocator::isAllocationAllowed());
    if (LIKELY(size() != capacity())) {
        ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
        new (NotNull, end()) T(val);
        ++m_size;
        return;
    }

    appendSlowCase(val);
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(const U& val)
{
    ASSERT(size() == capacity());

    const U* ptr = &val;
    ptr = expandCapacity(size() + 1, ptr);
    ASSERT(begin());

    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
    new (NotNull, end()) T(*ptr);
    ++m_size;
}
// This version of append saves a branch in the case where you know that the
// vector's capacity is large enough for the append to succeed.
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(const U& val)
{
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    // Vectors in ASAN builds don't have inlineCapacity.
    append(val);
#else
    ASSERT(size() < capacity());
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
    const U* ptr = &val;
    new (NotNull, end()) T(*ptr);
    ++m_size;
#endif
}
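// Usage sketch (illustrative): uncheckedAppend is safe only once capacity has
// been guaranteed up front:
//
//     Vector<size_t> squares;
//     squares.reserveCapacity(count);      // guarantee capacity first
//     for (size_t i = 0; i < count; ++i)
//         squares.uncheckedAppend(i * i);  // skips the capacity check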
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t otherCapacity, typename OtherAllocator>
inline void Vector<T, inlineCapacity, Allocator>::appendVector(const Vector<U, otherCapacity, OtherAllocator>& val)
{
    append(val.begin(), val.size());
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U* data, size_t dataSize)
{
    ASSERT(Allocator::isAllocationAllowed());
    RELEASE_ASSERT(position <= size());
    size_t newSize = m_size + dataSize;
    if (newSize > capacity()) {
        data = expandCapacity(newSize, data);
        ASSERT(begin());
    }
    RELEASE_ASSERT(newSize >= m_size);
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
    VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], spot);
    m_size = newSize;
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U& val)
{
    ASSERT(Allocator::isAllocationAllowed());
    RELEASE_ASSERT(position <= size());
    const U* data = &val;
    if (size() == capacity()) {
        data = expandCapacity(size() + 1, data);
        ASSERT(begin());
    }
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + 1);
    new (NotNull, spot) T(*data);
    ++m_size;
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename OtherAllocator>
inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const Vector<U, c, OtherAllocator>& val)
{
    insert(position, val.begin(), val.size());
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, size_t dataSize)
{
    insert(0, data, dataSize);
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
inline void Vector<T, inlineCapacity, Allocator>::prepend(const U& val)
{
    insert(0, val);
}
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename V>
inline void Vector<T, inlineCapacity, Allocator>::prepend(const Vector<U, c, V>& val)
{
    insert(0, val.begin(), val.size());
}
template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position)
{
    RELEASE_ASSERT(position < size());
    T* spot = begin() + position;
    spot->~T();
    TypeOperations::moveOverlapping(spot + 1, end(), spot);
    clearUnusedSlots(end() - 1, end());
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - 1);
    --m_size;
}
template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, size_t length)
{
    ASSERT_WITH_SECURITY_IMPLICATION(position <= size());
    RELEASE_ASSERT(position + length <= size());
    T* beginSpot = begin() + position;
    T* endSpot = beginSpot + length;
    TypeOperations::destruct(beginSpot, endSpot);
    TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
    clearUnusedSlots(end() - length, end());
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - length);
    m_size -= length;
}
template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::reverse()
{
    for (size_t i = 0; i < m_size / 2; ++i)
        std::swap(at(i), at(m_size - 1 - i));
}
template<typename T, size_t inlineCapacity, typename Allocator>
inline void swap(Vector<T, inlineCapacity, Allocator>& a, Vector<T, inlineCapacity, Allocator>& b)
{
    a.swap(b);
}
template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
{
    if (a.size() != b.size())
        return false;

    return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
}
template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
{
    return !(a == b);
}
// This is only called if the allocator is a HeapAllocator. It is used when
// visiting during a tracing GC.
template<typename T, size_t inlineCapacity, typename Allocator>
template<typename VisitorDispatcher>
void Vector<T, inlineCapacity, Allocator>::trace(VisitorDispatcher visitor)
{
    ASSERT(Allocator::isGarbageCollected); // Garbage collector must be enabled.
    if (!buffer())
        return;
    if (this->hasOutOfLineBuffer()) {
        // This is a performance optimization for a case where the buffer
        // has been already traced by somewhere. This can happen if
        // the conservative scanning traced an on-stack (false-positive
        // or real) pointer to the HeapVector, and then visitor->trace()
        // traces the HeapVector.
        if (Allocator::isHeapObjectAlive(buffer()))
            return;
        Allocator::markNoTracing(visitor, buffer());
    }
    const T* bufferBegin = buffer();
    const T* bufferEnd = buffer() + size();
    if (ShouldBeTraced<VectorTraits<T>>::value) {
        for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd; bufferEntry++)
            Allocator::template trace<VisitorDispatcher, T, VectorTraits<T>>(visitor, *const_cast<T*>(bufferEntry));
        checkUnusedSlots(buffer() + size(), buffer() + capacity());
    }
}
1335 template<typename T
, size_t N
>
1336 struct NeedsTracing
<Vector
<T
, N
>> {
1337 static const bool value
= false;
1345 #endif // WTF_Vector_h