// third_party/WebKit/Source/wtf/Vector.h
/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "wtf/Alignment.h"
#include "wtf/ConditionalDestructor.h"
#include "wtf/ContainerAnnotations.h"
#include "wtf/DefaultAllocator.h"
#include "wtf/FastAllocBase.h"
#include "wtf/Noncopyable.h"
#include "wtf/NotFound.h"
#include "wtf/StdLibExtras.h"
#include "wtf/VectorTraits.h"
#include <algorithm>
#include <iterator>
#include <string.h>
#include <utility>

// For ASAN builds, disable inline buffers completely as they cause various issues.
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
#define INLINE_CAPACITY 0
#else
#define INLINE_CAPACITY inlineCapacity
#endif

namespace WTF {

#if defined(MEMORY_SANITIZER_INITIAL_SIZE)
static const size_t kInitialVectorSize = 1;
#else
#ifndef WTF_VECTOR_INITIAL_SIZE
#define WTF_VECTOR_INITIAL_SIZE 4
#endif
static const size_t kInitialVectorSize = WTF_VECTOR_INITIAL_SIZE;
#endif

template<typename T, size_t inlineBuffer, typename Allocator>
class Deque;

template <bool needsDestruction, typename T>
struct VectorDestructor;

template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T();
    }
};

template <bool unusedSlotsMustBeZeroed, typename T>
struct VectorUnusedSlotClearer;

template<typename T>
struct VectorUnusedSlotClearer<false, T> {
    static void clear(T*, T*) { }
#if ENABLE(ASSERT)
    static void checkCleared(const T*, const T*) { }
#endif
};

template<typename T>
struct VectorUnusedSlotClearer<true, T> {
    static void clear(T* begin, T* end)
    {
        memset(reinterpret_cast<void*>(begin), 0, sizeof(T) * (end - begin));
    }

#if ENABLE(ASSERT)
    static void checkCleared(const T* begin, const T* end)
    {
        const unsigned char* unusedArea = reinterpret_cast<const unsigned char*>(begin);
        const unsigned char* endAddress = reinterpret_cast<const unsigned char*>(end);
        ASSERT(endAddress >= unusedArea);
        for (int i = 0; i < endAddress - unusedArea; ++i)
            ASSERT(!unusedArea[i]);
    }
#endif
};

template <bool canInitializeWithMemset, typename T>
struct VectorInitializer;

template<typename T>
struct VectorInitializer<false, T>
{
    static void initialize(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            new (NotNull, cur) T;
    }
};

template<typename T>
struct VectorInitializer<true, T>
{
    static void initialize(T* begin, T* end)
    {
        memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
    }
};

template <bool canMoveWithMemcpy, typename T>
struct VectorMover;

template<typename T>
struct VectorMover<false, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            src->~T();
            ++dst;
            ++src;
        }
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (src > dst)
            move(src, srcEnd, dst);
        else {
            T* dstEnd = dst + (srcEnd - src);
            while (src != srcEnd) {
                --srcEnd;
                --dstEnd;
                new (NotNull, dstEnd) T(*srcEnd);
                srcEnd->~T();
            }
        }
    }
    static void swap(T* src, T* srcEnd, T* dst)
    {
        std::swap_ranges(src, srcEnd, dst);
    }
};

template<typename T>
struct VectorMover<true, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        if (LIKELY(dst && src))
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (LIKELY(dst && src))
            memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void swap(T* src, T* srcEnd, T* dst)
    {
        std::swap_ranges(reinterpret_cast<char*>(src), reinterpret_cast<char*>(srcEnd), reinterpret_cast<char*>(dst));
    }
};

template <bool canCopyWithMemcpy, typename T>
struct VectorCopier;

template<typename T>
struct VectorCopier<false, T>
{
    template<typename U>
    static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            ++dst;
            ++src;
        }
    }
};

template<typename T>
struct VectorCopier<true, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        if (LIKELY(dst && src))
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    template<typename U>
    static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
    {
        VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst);
    }
};

template <bool canFillWithMemset, typename T>
struct VectorFiller;

template<typename T>
struct VectorFiller<false, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        while (dst != dstEnd) {
            new (NotNull, dst) T(val);
            ++dst;
        }
    }
};

template<typename T>
struct VectorFiller<true, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        static_assert(sizeof(T) == sizeof(char), "size of type should be one");
#if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
        if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
#endif
            memset(dst, val, dstEnd - dst);
    }
};

template<bool canCompareWithMemcmp, typename T>
struct VectorComparer;

template<typename T>
struct VectorComparer<false, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        ASSERT(a);
        ASSERT(b);
        return std::equal(a, a + size, b);
    }
};

template<typename T>
struct VectorComparer<true, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        ASSERT(a);
        ASSERT(b);
        return memcmp(a, b, sizeof(T) * size) == 0;
    }
};

template<typename T>
struct VectorTypeOperations
{
    static void destruct(T* begin, T* end)
    {
        VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
    }

    static void initialize(T* begin, T* end)
    {
        VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
    }

    static void move(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
    }

    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
    }

    static void swap(T* src, T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, dst);
    }

    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
    }

    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
    }

    static bool compare(const T* a, const T* b, size_t size)
    {
        return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
    }
};

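// The struct above is the single dispatch point for all of the bulk
// operations: VectorTraits<T> decides at compile time whether each one runs as
// a raw-memory routine or as a per-element loop. As an illustrative sketch
// (not part of this header): for a trivially copyable type such as int,
// VectorTraits<int>::canMoveWithMemcpy is true, so
//
//     VectorTypeOperations<int>::move(src, srcEnd, dst);
//
// boils down to a single memcpy(), while a type with a user-defined copy
// constructor takes the placement-new loop in VectorMover<false, T> instead.
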
template<typename T, bool hasInlineCapacity, typename Allocator>
class VectorBufferBase {
    WTF_MAKE_NONCOPYABLE(VectorBufferBase);
public:
    void allocateBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity);
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (hasInlineCapacity)
            m_buffer = Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate);
        else
            m_buffer = Allocator::template allocateVectorBacking<T>(sizeToAllocate);
        m_capacity = sizeToAllocate / sizeof(T);
    }

    void allocateExpandedBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity);
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (hasInlineCapacity)
            m_buffer = Allocator::template allocateInlineVectorBacking<T>(sizeToAllocate);
        else
            m_buffer = Allocator::template allocateExpandedVectorBacking<T>(sizeToAllocate);
        m_capacity = sizeToAllocate / sizeof(T);
    }

    size_t allocationSize(size_t capacity) const
    {
        return Allocator::template quantizedSize<T>(capacity);
    }

    T* buffer() { return m_buffer; }
    const T* buffer() const { return m_buffer; }
    size_t capacity() const { return m_capacity; }

    void clearUnusedSlots(T* from, T* to)
    {
        // If the vector backing is garbage-collected and needs tracing or
        // finalizing, we clear out the unused slots so that the visitor or
        // the finalizer does not cause a problem when visiting the unused
        // slots.
        VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T>>::value), T>::clear(from, to);
    }

    void checkUnusedSlots(const T* from, const T* to)
    {
#if ENABLE(ASSERT) && !defined(ANNOTATE_CONTIGUOUS_CONTAINER)
        VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T>>::value), T>::checkCleared(from, to);
#endif
    }

protected:
    VectorBufferBase()
        : m_buffer(0)
        , m_capacity(0)
    {
    }

    VectorBufferBase(T* buffer, size_t capacity)
        : m_buffer(buffer)
        , m_capacity(capacity)
    {
    }

    T* m_buffer;
    unsigned m_capacity;
    unsigned m_size;
};

template<typename T, size_t inlineCapacity, typename Allocator = DefaultAllocator>
class VectorBuffer;

template<typename T, typename Allocator>
class VectorBuffer<T, 0, Allocator> : protected VectorBufferBase<T, false, Allocator> {
private:
    typedef VectorBufferBase<T, false, Allocator> Base;
public:
    VectorBuffer()
    {
    }

    VectorBuffer(size_t capacity)
    {
        // Calling malloc(0) might take a lock and may actually do an
        // allocation on some systems.
        if (capacity)
            allocateBuffer(capacity);
    }

    void destruct()
    {
        deallocateBuffer(m_buffer);
        m_buffer = 0;
    }

    void deallocateBuffer(T* bufferToDeallocate)
    {
        Allocator::freeVectorBacking(bufferToDeallocate);
    }

    bool expandBuffer(size_t newCapacity)
    {
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (Allocator::expandVectorBacking(m_buffer, sizeToAllocate)) {
            m_capacity = sizeToAllocate / sizeof(T);
            return true;
        }
        return false;
    }

    inline bool shrinkBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity < capacity());
        size_t sizeToAllocate = allocationSize(newCapacity);
        if (Allocator::shrinkVectorBacking(m_buffer, allocationSize(capacity()), sizeToAllocate)) {
            m_capacity = sizeToAllocate / sizeof(T);
            return true;
        }
        return false;
    }

    void resetBufferPointer()
    {
        m_buffer = 0;
        m_capacity = 0;
    }

    void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other)
    {
        std::swap(m_buffer, other.m_buffer);
        std::swap(m_capacity, other.m_capacity);
    }

    using Base::allocateBuffer;
    using Base::allocationSize;

    using Base::buffer;
    using Base::capacity;

    using Base::clearUnusedSlots;
    using Base::checkUnusedSlots;

    bool hasOutOfLineBuffer() const
    {
        // When inlineCapacity is 0 we have an out of line buffer if we have a buffer.
        return buffer();
    }

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;
};

template<typename T, size_t inlineCapacity, typename Allocator>
class VectorBuffer : protected VectorBufferBase<T, true, Allocator> {
    WTF_MAKE_NONCOPYABLE(VectorBuffer);
private:
    typedef VectorBufferBase<T, true, Allocator> Base;
public:
    VectorBuffer()
        : Base(inlineBuffer(), inlineCapacity)
    {
    }

    VectorBuffer(size_t capacity)
        : Base(inlineBuffer(), inlineCapacity)
    {
        if (capacity > inlineCapacity)
            Base::allocateBuffer(capacity);
    }

    void destruct()
    {
        deallocateBuffer(m_buffer);
        m_buffer = 0;
    }

    NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate)
    {
        Allocator::freeInlineVectorBacking(bufferToDeallocate);
    }

    void deallocateBuffer(T* bufferToDeallocate)
    {
        if (UNLIKELY(bufferToDeallocate != inlineBuffer()))
            reallyDeallocateBuffer(bufferToDeallocate);
    }

    bool expandBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity > inlineCapacity);
        if (m_buffer == inlineBuffer())
            return false;

        size_t sizeToAllocate = allocationSize(newCapacity);
        if (Allocator::expandInlineVectorBacking(m_buffer, sizeToAllocate)) {
            m_capacity = sizeToAllocate / sizeof(T);
            return true;
        }
        return false;
    }

    inline bool shrinkBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity < capacity());
        if (newCapacity <= inlineCapacity) {
            // We need to switch to inlineBuffer. Vector::shrinkCapacity will
            // handle it.
            return false;
        }
        ASSERT(m_buffer != inlineBuffer());
        size_t newSize = allocationSize(newCapacity);
        if (!Allocator::shrinkInlineVectorBacking(m_buffer, allocationSize(capacity()), newSize))
            return false;
        m_capacity = newSize / sizeof(T);
        return true;
    }

    void resetBufferPointer()
    {
        m_buffer = inlineBuffer();
        m_capacity = inlineCapacity;
    }

    void allocateBuffer(size_t newCapacity)
    {
        // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
        if (newCapacity > inlineCapacity)
            Base::allocateBuffer(newCapacity);
        else
            resetBufferPointer();
    }

    void allocateExpandedBuffer(size_t newCapacity)
    {
        if (newCapacity > inlineCapacity)
            Base::allocateExpandedBuffer(newCapacity);
        else
            resetBufferPointer();
    }

    size_t allocationSize(size_t capacity) const
    {
        if (capacity <= inlineCapacity)
            return m_inlineBufferSize;
        return Base::allocationSize(capacity);
    }

    void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other)
    {
        typedef VectorTypeOperations<T> TypeOperations;

        if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
            ASSERT(m_capacity == other.m_capacity);
            if (m_size > other.m_size) {
                ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other.m_size, m_size);
                TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_size, other.inlineBuffer());
                TypeOperations::move(inlineBuffer() + other.m_size, inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
                Base::clearUnusedSlots(inlineBuffer() + other.m_size, inlineBuffer() + m_size);
                ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, other.m_size);
            } else {
                ANNOTATE_CHANGE_SIZE(inlineBuffer(), inlineCapacity, m_size, other.m_size);
                TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
                TypeOperations::move(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size, inlineBuffer() + m_size);
                Base::clearUnusedSlots(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
                ANNOTATE_CHANGE_SIZE(other.inlineBuffer(), inlineCapacity, other.m_size, m_size);
            }
        } else if (buffer() == inlineBuffer()) {
            ANNOTATE_DELETE_BUFFER(m_buffer, inlineCapacity, m_size);
            m_buffer = other.m_buffer;
            other.m_buffer = other.inlineBuffer();
            ANNOTATE_NEW_BUFFER(other.m_buffer, inlineCapacity, m_size);
            TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
            Base::clearUnusedSlots(inlineBuffer(), inlineBuffer() + m_size);
            std::swap(m_capacity, other.m_capacity);
        } else if (other.buffer() == other.inlineBuffer()) {
            ANNOTATE_DELETE_BUFFER(other.m_buffer, inlineCapacity, other.m_size);
            other.m_buffer = m_buffer;
            m_buffer = inlineBuffer();
            ANNOTATE_NEW_BUFFER(m_buffer, inlineCapacity, other.m_size);
            TypeOperations::move(other.inlineBuffer(), other.inlineBuffer() + other.m_size, inlineBuffer());
            Base::clearUnusedSlots(other.inlineBuffer(), other.inlineBuffer() + other.m_size);
            std::swap(m_capacity, other.m_capacity);
        } else {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }
    }

    using Base::buffer;
    using Base::capacity;

    bool hasOutOfLineBuffer() const
    {
        return buffer() && buffer() != inlineBuffer();
    }

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;

    static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
    T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
    const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }

    AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
    template<typename U, size_t inlineBuffer, typename V>
    friend class Deque;
};

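// Illustrative note on the class above: for a Vector<int, 4>, the first four
// elements live in m_inlineBuffer, so construction and small appends touch no
// allocator at all; only growing past four elements switches m_buffer to a
// heap-allocated backing. The mixed inline/out-of-line cases in
// swapVectorBuffer() exist precisely because either side of a swap may be in
// either state.
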
template<typename T, size_t inlineCapacity = 0, typename Allocator = DefaultAllocator> // Heap-allocated vectors with no inlineCapacity never need a destructor.
class Vector : private VectorBuffer<T, INLINE_CAPACITY, Allocator>, public ConditionalDestructor<Vector<T, INLINE_CAPACITY, Allocator>, (INLINE_CAPACITY == 0) && Allocator::isGarbageCollected> {
    WTF_USE_ALLOCATOR(Vector, Allocator);
private:
    typedef VectorBuffer<T, INLINE_CAPACITY, Allocator> Base;
    typedef VectorTypeOperations<T> TypeOperations;

public:
    typedef T ValueType;

    typedef T* iterator;
    typedef const T* const_iterator;
    typedef std::reverse_iterator<iterator> reverse_iterator;
    typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

    Vector()
    {
        static_assert(!IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, "Cannot initialize with memset if there is a vtable");
#if ENABLE(OILPAN)
        static_assert(Allocator::isGarbageCollected || !IsAllowOnlyInlineAllocation<T>::value || !NeedsTracing<T>::value, "Cannot put ALLOW_ONLY_INLINE_ALLOCATION objects that have trace methods into an off-heap Vector");
#endif
        ANNOTATE_NEW_BUFFER(begin(), capacity(), 0);
        m_size = 0;
    }

    explicit Vector(size_t size)
        : Base(size)
    {
        static_assert(!IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, "Cannot initialize with memset if there is a vtable");
#if ENABLE(OILPAN)
        static_assert(Allocator::isGarbageCollected || !IsAllowOnlyInlineAllocation<T>::value || !NeedsTracing<T>::value, "Cannot put ALLOW_ONLY_INLINE_ALLOCATION objects that have trace methods into an off-heap Vector");
#endif
        ANNOTATE_NEW_BUFFER(begin(), capacity(), size);
        m_size = size;
        TypeOperations::initialize(begin(), end());
    }

    // Off-GC-heap vectors: Destructor should be called.
    // On-GC-heap vectors: Destructor should be called for inline buffers (if
    // any) but destructor shouldn't be called for vector backing since it is
    // managed by the traced GC heap.
    void finalize()
    {
        if (!INLINE_CAPACITY) {
            if (LIKELY(!Base::buffer()))
                return;
        }
        ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size);
        if (LIKELY(m_size) && !(Allocator::isGarbageCollected && this->hasOutOfLineBuffer())) {
            TypeOperations::destruct(begin(), end());
            m_size = 0; // Partial protection against use-after-free.
        }

        Base::destruct();
    }

    void finalizeGarbageCollectedObject()
    {
        finalize();
    }

    Vector(const Vector&);
    template<size_t otherCapacity>
    explicit Vector(const Vector<T, otherCapacity, Allocator>&);

    Vector& operator=(const Vector&);
    template<size_t otherCapacity>
    Vector& operator=(const Vector<T, otherCapacity, Allocator>&);

    Vector(Vector&&);
    Vector& operator=(Vector&&);

    size_t size() const { return m_size; }
    size_t capacity() const { return Base::capacity(); }
    bool isEmpty() const { return !size(); }

    T& at(size_t i)
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }
    const T& at(size_t i) const
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }

    T& operator[](size_t i) { return at(i); }
    const T& operator[](size_t i) const { return at(i); }

    T* data() { return Base::buffer(); }
    const T* data() const { return Base::buffer(); }

    iterator begin() { return data(); }
    iterator end() { return begin() + m_size; }
    const_iterator begin() const { return data(); }
    const_iterator end() const { return begin() + m_size; }

    reverse_iterator rbegin() { return reverse_iterator(end()); }
    reverse_iterator rend() { return reverse_iterator(begin()); }
    const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
    const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }

    T& first() { return at(0); }
    const T& first() const { return at(0); }
    T& last() { return at(size() - 1); }
    const T& last() const { return at(size() - 1); }

    template<typename U> bool contains(const U&) const;
    template<typename U> size_t find(const U&) const;
    template<typename U> size_t reverseFind(const U&) const;

    void shrink(size_t size);
    void grow(size_t size);
    void resize(size_t size);
    void reserveCapacity(size_t newCapacity);
    void reserveInitialCapacity(size_t initialCapacity);
    void shrinkToFit() { shrinkCapacity(size()); }
    void shrinkToReasonableCapacity()
    {
        if (size() * 2 < capacity())
            shrinkCapacity(size() + size() / 4 + 1);
    }

    void clear() { shrinkCapacity(0); }

    template<typename U> void append(const U*, size_t);
    template<typename U> void append(const U&);
    template<typename U> void uncheckedAppend(const U& val);
    template<typename U, size_t otherCapacity, typename V> void appendVector(const Vector<U, otherCapacity, V>&);

    template<typename U> void insert(size_t position, const U*, size_t);
    template<typename U> void insert(size_t position, const U&);
    template<typename U, size_t c, typename V> void insert(size_t position, const Vector<U, c, V>&);

    template<typename U> void prepend(const U*, size_t);
    template<typename U> void prepend(const U&);
    template<typename U, size_t c, typename V> void prepend(const Vector<U, c, V>&);

    void remove(size_t position);
    void remove(size_t position, size_t length);

    void removeLast()
    {
        ASSERT(!isEmpty());
        shrink(size() - 1);
    }

    Vector(size_t size, const T& val)
        : Base(size)
    {
        ANNOTATE_NEW_BUFFER(begin(), capacity(), size);
        m_size = size;
        TypeOperations::uninitializedFill(begin(), end(), val);
    }

    void fill(const T&, size_t);
    void fill(const T& val) { fill(val, size()); }

    template<typename Iterator> void appendRange(Iterator start, Iterator end);

    void swap(Vector& other)
    {
        Base::swapVectorBuffer(other);
        std::swap(m_size, other.m_size);
    }

    void reverse();

    template<typename VisitorDispatcher> void trace(VisitorDispatcher);

private:
    void expandCapacity(size_t newMinCapacity);
    const T* expandCapacity(size_t newMinCapacity, const T*);
    template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
    void shrinkCapacity(size_t newCapacity);
    template<typename U> void appendSlowCase(const U&);

    using Base::m_size;
    using Base::buffer;
    using Base::capacity;
    using Base::swapVectorBuffer;
    using Base::allocateBuffer;
    using Base::allocationSize;
    using Base::clearUnusedSlots;
    using Base::checkUnusedSlots;
};

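// A minimal usage sketch for the interface declared above (illustrative only;
// process() is a placeholder for caller code):
//
//     Vector<int> values;
//     values.append(1);
//     values.append(2);
//     for (size_t i = 0; i < values.size(); ++i)
//         process(values[i]); // operator[] RELEASE_ASSERTs that i < size().
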
template<typename T, size_t inlineCapacity, typename Allocator>
Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other)
    : Base(other.capacity())
{
    ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size());
    m_size = other.size();
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

template<typename T, size_t inlineCapacity, typename Allocator>
template<size_t otherCapacity>
Vector<T, inlineCapacity, Allocator>::Vector(const Vector<T, otherCapacity, Allocator>& other)
    : Base(other.capacity())
{
    ANNOTATE_NEW_BUFFER(begin(), capacity(), other.size());
    m_size = other.size();
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

template<typename T, size_t inlineCapacity, typename Allocator>
Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, inlineCapacity, Allocator>& other)
{
    if (UNLIKELY(&other == this))
        return *this;

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size());
    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }

template<typename T, size_t inlineCapacity, typename Allocator>
template<size_t otherCapacity>
Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, otherCapacity, Allocator>& other)
{
    // If the inline capacities match, we should call the more specific
    // template. If the inline capacities don't match, the two objects
    // shouldn't be allocated at the same address.
    ASSERT(!typelessPointersAreEqual(&other, this));

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, other.size());
    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

template<typename T, size_t inlineCapacity, typename Allocator>
Vector<T, inlineCapacity, Allocator>::Vector(Vector<T, inlineCapacity, Allocator>&& other)
{
    m_size = 0;
    // It's a little weird to implement a move constructor using swap, but this
    // way we don't have to add a move constructor to VectorBuffer.
    swap(other);
}

template<typename T, size_t inlineCapacity, typename Allocator>
Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(Vector<T, inlineCapacity, Allocator>&& other)
{
    swap(other);
    return *this;
}

template<typename T, size_t inlineCapacity, typename Allocator>
template<typename U>
bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const
{
    return find(value) != kNotFound;
}

template<typename T, size_t inlineCapacity, typename Allocator>
template<typename U>
size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const
{
    const T* b = begin();
    const T* e = end();
    for (const T* iter = b; iter < e; ++iter) {
        if (*iter == value)
            return iter - b;
    }
    return kNotFound;
}

template<typename T, size_t inlineCapacity, typename Allocator>
template<typename U>
size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const
{
    const T* b = begin();
    const T* iter = end();
    while (iter > b) {
        --iter;
        if (*iter == value)
            return iter - b;
    }
    return kNotFound;
}

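// Illustrative sketch of the search helpers above: find() returns an index
// into the vector, or WTF::kNotFound when the value is absent.
//
//     Vector<int> v;
//     v.append(10);
//     v.append(20);
//     size_t index = v.find(20); // 1
//     bool has30 = v.contains(30); // false; find() returned kNotFound.
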
template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize)
{
    if (size() > newSize)
        shrink(newSize);
    else if (newSize > capacity()) {
        clear();
        reserveCapacity(newSize);
        ASSERT(begin());
    }

    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
    std::fill(begin(), end(), val);
    TypeOperations::uninitializedFill(end(), begin() + newSize, val);
    m_size = newSize;
}

template<typename T, size_t inlineCapacity, typename Allocator>
template<typename Iterator>
void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, Iterator end)
{
    for (Iterator it = start; it != end; ++it)
        append(*it);
}

template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity)
{
    size_t oldCapacity = capacity();
    size_t expandedCapacity = oldCapacity;
    // We use a more aggressive expansion strategy for Vectors with inline
    // storage: they are more likely to live on the stack, so the risk of heap
    // bloat is minimized. Furthermore, exceeding the inline capacity limit is
    // not supposed to happen in the common case and may indicate a
    // pathological condition or microbenchmark.
    if (INLINE_CAPACITY) {
        expandedCapacity *= 2;
        // Check for integer overflow, which could happen in the 32-bit build.
        RELEASE_ASSERT(expandedCapacity > oldCapacity);
    } else {
        // This cannot overflow. On 64-bit, the "expanded" integer is 32-bit,
        // and any encroachment above 2^32 will fail allocation in
        // allocateBuffer(). On 32-bit, there's not enough address space to
        // hold the old and new buffers. In addition, our underlying allocator
        // is supposed to always fail on > (2^31 - 1) allocations.
        expandedCapacity += (expandedCapacity / 4) + 1;
    }
    reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kInitialVectorSize), expandedCapacity)));
}

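// Illustrative arithmetic for the strategy above: a heap-only Vector at
// capacity 16 expands to 16 + 16 / 4 + 1 = 21 slots, while a Vector with
// inline storage doubles from 16 to 32. In both cases the result is clamped
// from below by newMinCapacity and kInitialVectorSize.
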
template<typename T, size_t inlineCapacity, typename Allocator>
const T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, const T* ptr)
{
    if (ptr < begin() || ptr >= end()) {
        expandCapacity(newMinCapacity);
        return ptr;
    }
    size_t index = ptr - begin();
    expandCapacity(newMinCapacity);
    return begin() + index;
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
inline U* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, U* ptr)
{
    expandCapacity(newMinCapacity);
    return ptr;
}

template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::resize(size_t size)
{
    if (size <= m_size) {
        TypeOperations::destruct(begin() + size, end());
        clearUnusedSlots(begin() + size, end());
        ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
    } else {
        if (size > capacity())
            expandCapacity(size);
        ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
        TypeOperations::initialize(end(), begin() + size);
    }

    m_size = size;
}

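// Illustrative sketch contrasting resize() above with shrink()/grow() below:
// resize() accepts any target size, while shrink() and grow() assert on
// direction.
//
//     Vector<int> v;
//     v.resize(4); // Four default-initialized ints.
//     v.shrink(2); // Destroys v[2..3]; ASSERTs if the argument exceeds size().
//     v.grow(8);   // Default-initializes v[2..7]; ASSERTs if below size().
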
template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::shrink(size_t size)
{
    ASSERT(size <= m_size);
    TypeOperations::destruct(begin() + size, end());
    clearUnusedSlots(begin() + size, end());
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
    m_size = size;
}

template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::grow(size_t size)
{
    ASSERT(size >= m_size);
    if (size > capacity())
        expandCapacity(size);
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, size);
    TypeOperations::initialize(end(), begin() + size);
    m_size = size;
}

template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity)
{
    if (UNLIKELY(newCapacity <= capacity()))
        return;
    T* oldBuffer = begin();
    if (!oldBuffer) {
        Base::allocateBuffer(newCapacity);
        return;
    }
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    size_t oldCapacity = capacity();
#endif
    // The Allocator::isGarbageCollected check is not needed. The check is
    // just a static hint for a compiler to indicate that Base::expandBuffer
    // returns false if Allocator is a DefaultAllocator.
    if (Allocator::isGarbageCollected && Base::expandBuffer(newCapacity)) {
        ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity());
        return;
    }
    T* oldEnd = end();
    Base::allocateExpandedBuffer(newCapacity);
    ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
    TypeOperations::move(oldBuffer, oldEnd, begin());
    clearUnusedSlots(oldBuffer, oldEnd);
    ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size);
    Base::deallocateBuffer(oldBuffer);
}

template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(size_t initialCapacity)
{
    ASSERT(!m_size);
    ASSERT(capacity() == INLINE_CAPACITY);
    if (initialCapacity > INLINE_CAPACITY) {
        ANNOTATE_DELETE_BUFFER(begin(), capacity(), m_size);
        Base::allocateBuffer(initialCapacity);
        ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
    }
}

template<typename T, size_t inlineCapacity, typename Allocator>
void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity)
{
    if (newCapacity >= capacity())
        return;

    if (newCapacity < size())
        shrink(newCapacity);

    T* oldBuffer = begin();
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    size_t oldCapacity = capacity();
#endif
    if (newCapacity > 0) {
        if (Base::shrinkBuffer(newCapacity)) {
            ANNOTATE_CHANGE_CAPACITY(begin(), oldCapacity, m_size, capacity());
            return;
        }

        T* oldEnd = end();
        Base::allocateBuffer(newCapacity);
        if (begin() != oldBuffer) {
            ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
            TypeOperations::move(oldBuffer, oldEnd, begin());
            clearUnusedSlots(oldBuffer, oldEnd);
            ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size);
        }
    } else {
        Base::resetBufferPointer();
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
        if (oldBuffer != begin()) {
            ANNOTATE_NEW_BUFFER(begin(), capacity(), m_size);
            ANNOTATE_DELETE_BUFFER(oldBuffer, oldCapacity, m_size);
        }
#endif
    }

    Base::deallocateBuffer(oldBuffer);
}

// Templatizing these is better than just letting the conversion happen
// implicitly, because for instance it allows a PassRefPtr to be appended to a
// RefPtr vector without refcount thrash.

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
void Vector<T, inlineCapacity, Allocator>::append(const U* data, size_t dataSize)
{
    ASSERT(Allocator::isAllocationAllowed());
    size_t newSize = m_size + dataSize;
    if (newSize > capacity()) {
        data = expandCapacity(newSize, data);
        ASSERT(begin());
    }
    RELEASE_ASSERT(newSize >= m_size);
    T* dest = end();
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
    VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], dest);
    m_size = newSize;
}

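// Illustrative sketch of the buffer-appending overload above:
//
//     static const int kPrimes[] = { 2, 3, 5, 7 };
//     Vector<int> v;
//     v.append(kPrimes, 4); // Bulk-copies, expanding capacity at most once.
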
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(const U& val)
{
    ASSERT(Allocator::isAllocationAllowed());
    if (LIKELY(size() != capacity())) {
        ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
        new (NotNull, end()) T(val);
        ++m_size;
        return;
    }

    appendSlowCase(val);
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(const U& val)
{
    ASSERT(size() == capacity());

    const U* ptr = &val;
    ptr = expandCapacity(size() + 1, ptr);
    ASSERT(begin());

    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
    new (NotNull, end()) T(*ptr);
    ++m_size;
}

// This version of append saves a branch in the case where you know that the
// vector's capacity is large enough for the append to succeed.

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(const U& val)
{
#ifdef ANNOTATE_CONTIGUOUS_CONTAINER
    // Vectors in ASAN builds don't have inlineCapacity.
    append(val);
#else
    ASSERT(size() < capacity());
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
    const U* ptr = &val;
    new (NotNull, end()) T(*ptr);
    ++m_size;
#endif
}

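// Illustrative sketch pairing a reservation with uncheckedAppend() so the
// per-append capacity branch is skipped:
//
//     Vector<size_t> squares;
//     squares.reserveInitialCapacity(100);
//     for (size_t i = 0; i < 100; ++i)
//         squares.uncheckedAppend(i * i); // Safe: capacity was reserved above.
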
template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t otherCapacity, typename OtherAllocator>
inline void Vector<T, inlineCapacity, Allocator>::appendVector(const Vector<U, otherCapacity, OtherAllocator>& val)
{
    append(val.begin(), val.size());
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U* data, size_t dataSize)
{
    ASSERT(Allocator::isAllocationAllowed());
    RELEASE_ASSERT(position <= size());
    size_t newSize = m_size + dataSize;
    if (newSize > capacity()) {
        data = expandCapacity(newSize, data);
        ASSERT(begin());
    }
    RELEASE_ASSERT(newSize >= m_size);
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, newSize);
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
    VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], spot);
    m_size = newSize;
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U& val)
{
    ASSERT(Allocator::isAllocationAllowed());
    RELEASE_ASSERT(position <= size());
    const U* data = &val;
    if (size() == capacity()) {
        data = expandCapacity(size() + 1, data);
        ASSERT(begin());
    }
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size + 1);
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + 1);
    new (NotNull, spot) T(*data);
    ++m_size;
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename OtherAllocator>
inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const Vector<U, c, OtherAllocator>& val)
{
    insert(position, val.begin(), val.size());
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, size_t dataSize)
{
    insert(0, data, dataSize);
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
inline void Vector<T, inlineCapacity, Allocator>::prepend(const U& val)
{
    insert(0, val);
}

template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename V>
inline void Vector<T, inlineCapacity, Allocator>::prepend(const Vector<U, c, V>& val)
{
    insert(0, val.begin(), val.size());
}

template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position)
{
    RELEASE_ASSERT(position < size());
    T* spot = begin() + position;
    spot->~T();
    TypeOperations::moveOverlapping(spot + 1, end(), spot);
    clearUnusedSlots(end() - 1, end());
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - 1);
    --m_size;
}

template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, size_t length)
{
    ASSERT_WITH_SECURITY_IMPLICATION(position <= size());
    RELEASE_ASSERT(position + length <= size());
    T* beginSpot = begin() + position;
    T* endSpot = beginSpot + length;
    TypeOperations::destruct(beginSpot, endSpot);
    TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
    clearUnusedSlots(end() - length, end());
    ANNOTATE_CHANGE_SIZE(begin(), capacity(), m_size, m_size - length);
    m_size -= length;
}

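// Illustrative sketch of the range remove above: the tail is shifted left and
// the vacated slots are cleared.
//
//     Vector<char> v;
//     v.append("abcdef", 6);
//     v.remove(1, 3); // v now holds 'a', 'e', 'f'.
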
template<typename T, size_t inlineCapacity, typename Allocator>
inline void Vector<T, inlineCapacity, Allocator>::reverse()
{
    for (size_t i = 0; i < m_size / 2; ++i)
        std::swap(at(i), at(m_size - 1 - i));
}

template<typename T, size_t inlineCapacity, typename Allocator>
inline void swap(Vector<T, inlineCapacity, Allocator>& a, Vector<T, inlineCapacity, Allocator>& b)
{
    a.swap(b);
}

template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
{
    if (a.size() != b.size())
        return false;
    if (a.isEmpty())
        return true;
    return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
}

template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
{
    return !(a == b);
}

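// Illustrative sketch of the comparison operators above, which compare sizes
// and then contents, and therefore work across differing inline capacities:
//
//     Vector<int, 4> a;
//     Vector<int> b;
//     a.append(1);
//     b.append(1);
//     ASSERT(a == b); // Dispatches to memcmp for types like int.
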
// This is only called if the allocator is a HeapAllocator. It is used when
// visiting during a tracing GC.
template<typename T, size_t inlineCapacity, typename Allocator>
template<typename VisitorDispatcher>
void Vector<T, inlineCapacity, Allocator>::trace(VisitorDispatcher visitor)
{
    ASSERT(Allocator::isGarbageCollected); // Garbage collector must be enabled.
    if (!buffer())
        return;
    if (this->hasOutOfLineBuffer()) {
        // This is a performance optimization for a case where the buffer has
        // already been traced somewhere else. This can happen if the
        // conservative scanning traced an on-stack (false-positive or real)
        // pointer to the HeapVector, and then visitor->trace() traces the
        // HeapVector.
        if (Allocator::isHeapObjectAlive(buffer()))
            return;
        Allocator::markNoTracing(visitor, buffer());
    }
    const T* bufferBegin = buffer();
    const T* bufferEnd = buffer() + size();
    if (ShouldBeTraced<VectorTraits<T>>::value) {
        for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd; bufferEntry++)
            Allocator::template trace<VisitorDispatcher, T, VectorTraits<T>>(visitor, *const_cast<T*>(bufferEntry));
        checkUnusedSlots(buffer() + size(), buffer() + capacity());
    }
}

#if !ENABLE(OILPAN)
template<typename T, size_t N>
struct NeedsTracing<Vector<T, N>> {
    static const bool value = false;
};
#endif

} // namespace WTF

using WTF::Vector;

#endif // WTF_Vector_h