//===- llvm/unittest/Support/MemoryTest.cpp - MappedMemory tests ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Support/Memory.h"
#include "llvm/Support/Process.h"
#include "gtest/gtest.h"
#include <cstdlib>

#if defined(__NetBSD__)
#include <sys/param.h>
#include <sys/types.h>
#include <sys/sysctl.h>
#include <err.h>
#include <unistd.h>
#endif

using namespace llvm;
using namespace sys;

namespace {

bool IsMPROTECT() {
#if defined(__NetBSD__)
  // Query this process's PaX flags via sysctl; MPROTECT forbids W+X mappings.
  int mib[3];
  int paxflags;
  size_t len = sizeof(paxflags);

  mib[0] = CTL_PROC;
  mib[1] = getpid();
  mib[2] = PROC_PID_PAXFLAGS;

  if (sysctl(mib, 3, &paxflags, &len, NULL, 0) != 0)
    err(EXIT_FAILURE, "sysctl");

  return !!(paxflags & CTL_PROC_PAXFLAGS_MPROTECT);
#elif (defined(__APPLE__) && defined(__aarch64__)) || defined(__OpenBSD__)
  // W^X is always enforced on these platforms.
  return true;
#else
  return false;
#endif
}

class MappedMemoryTest : public ::testing::TestWithParam<unsigned> {
public:
  MappedMemoryTest() {
    Flags = GetParam();
    PageSize = sys::Process::getPageSizeEstimate();
  }

protected:
  // Adds RW flags to permit testing of the resulting memory
  unsigned getTestableEquivalent(unsigned RequestedFlags) {
    switch (RequestedFlags) {
    case Memory::MF_READ:
    case Memory::MF_WRITE:
    case Memory::MF_READ|Memory::MF_WRITE:
      return Memory::MF_READ|Memory::MF_WRITE;
    case Memory::MF_READ|Memory::MF_EXEC:
    case Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC:
    case Memory::MF_EXEC:
      return Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC;
    }
    // Default in case values are added to the enum, as required by some
    // compilers.
    return Memory::MF_READ|Memory::MF_WRITE;
  }

  // Returns true if the memory blocks overlap
  bool doesOverlap(MemoryBlock M1, MemoryBlock M2) {
    if (M1.base() == M2.base())
      return true;

    if (M1.base() > M2.base())
      return (unsigned char *)M2.base() + M2.allocatedSize() > M1.base();

    return (unsigned char *)M1.base() + M1.allocatedSize() > M2.base();
  }

  unsigned Flags;
  size_t   PageSize;
};

// MPROTECT prevents W+X mmaps
#define CHECK_UNSUPPORTED() \
  do { \
    if ((Flags & Memory::MF_WRITE) && (Flags & Memory::MF_EXEC) && \
        IsMPROTECT()) \
      GTEST_SKIP(); \
  } while (0)
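
// For orientation, the interface exercised below is roughly as follows (see
// llvm/Support/Memory.h for the authoritative declarations):
//   MemoryBlock Memory::allocateMappedMemory(size_t NumBytes,
//                                            const MemoryBlock *NearBlock,
//                                            unsigned Flags,
//                                            std::error_code &EC);
//   std::error_code Memory::protectMappedMemory(const MemoryBlock &Block,
//                                               unsigned Flags);
//   std::error_code Memory::releaseMappedMemory(MemoryBlock &Block);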

TEST_P(MappedMemoryTest, AllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, AllocAndReleaseHuge) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(
      sizeof(int), nullptr, Flags | Memory::MF_HUGE_HINT, EC);
  EXPECT_EQ(std::error_code(), EC);

  // Test large/huge memory pages. If large pages aren't available, the
  // allocation should fall back to regular (e.g. 4 KiB) pages.

  EXPECT_NE((void *)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleAllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.allocatedSize());
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, BasicWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    GTEST_SKIP();
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  int *a = (int*)M1.base();
  *a = 1;
  EXPECT_EQ(1, *a);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    GTEST_SKIP();
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(1U * sizeof(int), M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.allocatedSize());

  int *x = (int*)M1.base();
  *x = 1;

  int *y = (int*)M2.base();
  for (int i = 0; i < 8; i++) {
    y[i] = i;
  }

  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));

  MemoryBlock M4 = Memory::allocateMappedMemory(64 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(64U * sizeof(int), M4.allocatedSize());
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));

  // Verify that M2 remains unaffected by other activity
  for (int i = 0; i < 8; i++) {
    EXPECT_EQ(i, y[i]);
  }
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, EnabledWrite) {
  // MPROTECT prevents W+X, and since this test always adds W we need
  // to block any variant with X.
  if ((Flags & Memory::MF_EXEC) && IsMPROTECT())
    GTEST_SKIP();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(2 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(2U * sizeof(int), M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.allocatedSize());

  EXPECT_FALSE(Memory::protectMappedMemory(M1, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M2, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M3, getTestableEquivalent(Flags)));

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  int *x = (int*)M1.base();
  *x = 1;
  int *y = (int*)M2.base();
  for (unsigned int i = 0; i < 8; i++) {
    y[i] = i;
  }
  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));

  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.allocatedSize());
  EXPECT_EQ(std::error_code(),
            Memory::protectMappedMemory(M4, getTestableEquivalent(Flags)));
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, SuccessiveNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &M1, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &M2, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, DuplicateNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(3*PageSize), 16);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near(nullptr, 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroSizeNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(4*PageSize), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, UnalignedNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(2*PageSize+5), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(15, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

// Note that Memory::MF_WRITE is not supported on its own on every operating
// system and architecture; requesting it can imply MF_READ|MF_WRITE.
unsigned MemoryFlags[] = {
    Memory::MF_READ,
    Memory::MF_WRITE,
    Memory::MF_READ|Memory::MF_WRITE,
    Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC};

INSTANTIATE_TEST_SUITE_P(AllocationTests,
                         MappedMemoryTest,
                         ::testing::ValuesIn(MemoryFlags));

} // anonymous namespace