/*
 * Copyright (C) 2010 Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
30 #include "wtf/Assertions.h"
31 #include "wtf/FastMalloc.h"
32 #include "wtf/Noncopyable.h"
33 #include "wtf/OwnPtr.h"
34 #include "wtf/PassOwnPtr.h"
35 #include "wtf/RefCounted.h"
36 #include "wtf/Vector.h"

// An arena which allocates only Plain Old Data (POD), or classes and
// structs bottoming out in Plain Old Data. NOTE: the constructors of
// the objects allocated in this arena are called, but _not_ their
// destructors.
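//
// Illustrative usage sketch ("Point" is a hypothetical client type, not part
// of this header): objects are placement-constructed into arena-owned chunks
// and are never individually freed or destructed.
//
//   struct Point {
//       Point() : x(0), y(0) { }
//       explicit Point(int v) : x(v), y(v) { }
//       int x, y;
//   };
//
//   RefPtr<PODArena> arena = PODArena::create();
//   Point* origin = arena->allocateObject<Point>();    // zero-argument constructor runs
//   Point* diagonal = arena->allocateObject<Point>(7); // one-argument constructor runs
//   // Both objects stay valid until the last reference to the arena is
//   // dropped; their destructors are never called.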
class PODArena final : public RefCounted<PODArena> {
public:
    // The arena is configured with an allocator, which is responsible
    // for allocating and freeing chunks of memory at a time.
    class Allocator : public RefCounted<Allocator> {
    public:
        virtual void* allocate(size_t size) = 0;
        virtual void free(void* ptr) = 0;

    protected:
        virtual ~Allocator() { }
        friend class WTF::RefCounted<Allocator>;
    };

    // The Arena's default allocator, which uses fastMalloc and
    // fastFree to allocate chunks of storage.
    class FastMallocAllocator : public Allocator {
    public:
        static PassRefPtr<FastMallocAllocator> create()
        {
            return adoptRef(new FastMallocAllocator);
        }

        void* allocate(size_t size) override { return fastMalloc(size); }
        void free(void* ptr) override { fastFree(ptr); }

    protected:
        FastMallocAllocator() { }
    };

    // Creates a new PODArena configured with a FastMallocAllocator.
    static PassRefPtr<PODArena> create()
    {
        return adoptRef(new PODArena);
    }

    // Creates a new PODArena configured with the given Allocator.
    static PassRefPtr<PODArena> create(PassRefPtr<Allocator> allocator)
    {
        return adoptRef(new PODArena(allocator));
    }
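
    // A sketch of how a custom Allocator might be plugged in (the
    // CountingAllocator below is hypothetical, e.g. for a unit test that
    // wants to observe how many chunks the arena requests):
    //
    //   class CountingAllocator : public PODArena::Allocator {
    //   public:
    //       static PassRefPtr<CountingAllocator> create() { return adoptRef(new CountingAllocator); }
    //       void* allocate(size_t size) override { ++m_allocationCount; return fastMalloc(size); }
    //       void free(void* ptr) override { fastFree(ptr); }
    //       size_t allocationCount() const { return m_allocationCount; }
    //   private:
    //       CountingAllocator() : m_allocationCount(0) { }
    //       size_t m_allocationCount;
    //   };
    //
    //   RefPtr<PODArena> arena = PODArena::create(CountingAllocator::create());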

    // Allocates an object from the arena.
    template<class T> T* allocateObject()
    {
        return new (allocateBase<T>()) T();
    }

    // Allocates an object from the arena, calling a single-argument constructor.
    template<class T, class Argument1Type> T* allocateObject(const Argument1Type& argument1)
    {
        return new (allocateBase<T>()) T(argument1);
    }

    // The initial size of allocated chunks; increases as necessary to
    // satisfy large allocations. Mainly public for unit tests.
    enum {
        DefaultChunkSize = 16384
    };

protected:
    friend class WTF::RefCounted<PODArena>;

    PODArena()
        : m_allocator(FastMallocAllocator::create())
        , m_current(0)
        , m_currentChunkSize(DefaultChunkSize) { }

    explicit PODArena(PassRefPtr<Allocator> allocator)
        : m_allocator(allocator)
        , m_current(0)
        , m_currentChunkSize(DefaultChunkSize) { }

    // Returns the alignment requirement for classes and structs on the
    // current platform.
    template <class T> static size_t minAlignment()
    {
        return WTF_ALIGN_OF(T);
    }
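
    // allocateBase<T>() below implements the arena's growth policy.
    // Illustrative trace (assumed sizes): with the default 16384-byte chunks,
    // the first call creates one chunk and bump-allocates from it; later calls
    // keep using that chunk until a request no longer fits, at which point a
    // new chunk is appended and becomes m_current. A request larger than the
    // current chunk size (say, a hypothetical 20000-byte object) first raises
    // m_currentChunkSize so the new chunk can hold it; the chunk size never
    // shrinks afterwards.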
    template<class T> void* allocateBase()
    {
        void* ptr = 0;
        size_t roundedSize = roundUp(sizeof(T), minAlignment<T>());
        if (m_current)
            ptr = m_current->allocate(roundedSize);

        if (!ptr) {
            if (roundedSize > m_currentChunkSize)
                m_currentChunkSize = roundedSize;
            m_chunks.append(adoptPtr(new Chunk(m_allocator.get(), m_currentChunkSize)));
            m_current = m_chunks.last().get();
            ptr = m_current->allocate(roundedSize);
        }
        return ptr;
    }

    // Rounds up the given allocation size to the specified alignment.
    size_t roundUp(size_t size, size_t alignment)
    {
        ASSERT(!(alignment % 2));
        return (size + alignment - 1) & ~(alignment - 1);
    }
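
    // Worked example (illustrative numbers): roundUp(10, 8) evaluates to
    // (10 + 7) & ~7 == 16, and roundUp(16, 8) stays at 16. The bit trick
    // assumes a power-of-two alignment; the ASSERT above only checks that the
    // alignment is even, so callers are expected to pass power-of-two values
    // such as those produced by minAlignment<T>().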

    // Manages a chunk of memory and individual allocations out of it.
    class Chunk {
        WTF_MAKE_NONCOPYABLE(Chunk);
    public:
        // Allocates a block of memory of the given size from the passed
        // Allocator.
        Chunk(Allocator* allocator, size_t size)
            : m_allocator(allocator)
            , m_size(size)
            , m_currentOffset(0)
        {
            m_base = static_cast<uint8_t*>(m_allocator->allocate(size));
        }

        // Frees the memory allocated from the Allocator in the
        // constructor.
        ~Chunk()
        {
            m_allocator->free(m_base);
        }

        // Returns a pointer to "size" bytes of storage, or 0 if this
        // Chunk could not satisfy the allocation.
        void* allocate(size_t size)
        {
            // Check for overflow
            if (m_currentOffset + size < m_currentOffset)
                return 0;

            if (m_currentOffset + size > m_size)
                return 0;

            void* result = m_base + m_currentOffset;
            m_currentOffset += size;
            return result;
        }
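
        // Illustrative behaviour (assumed numbers): for a 100-byte Chunk,
        // allocate(40) returns m_base + 0, a second allocate(40) returns
        // m_base + 40, and a third allocate(40) returns 0 because only 20
        // bytes remain, prompting the caller to start a new Chunk.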

    protected:
        Allocator* m_allocator;
        uint8_t* m_base;
        size_t m_size;
        size_t m_currentOffset;
    };

    RefPtr<Allocator> m_allocator;
    Chunk* m_current;
    size_t m_currentChunkSize;
    Vector<OwnPtr<Chunk>> m_chunks;
};

#endif // PODArena_h