Bump version to 19.1.0-rc3
[llvm-project.git] / llvm / unittests / Support / MemoryTest.cpp
blob9daa6d0ff9e4dd2840075d362b9d646c543c1ad8
1 //===- llvm/unittest/Support/MemoryTest.cpp - MappedMemory tests ----------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
9 #include "llvm/Support/Memory.h"
10 #include "llvm/Support/Process.h"
11 #include "gtest/gtest.h"
12 #include <cstdlib>
14 #if defined(__NetBSD__)
15 // clang-format off
16 #include <sys/param.h>
17 #include <sys/types.h>
18 #include <sys/sysctl.h>
19 #include <err.h>
20 #include <unistd.h>
21 // clang-format on
22 #endif
24 using namespace llvm;
25 using namespace sys;
27 namespace {
// Returns true when the platform forbids mappings that are simultaneously
// writable and executable (W^X): PaX MPROTECT on NetBSD (queried via
// sysctl), always on Apple arm64 and OpenBSD, and false everywhere else.
29 bool IsMPROTECT() {
30 #if defined(__NetBSD__)
31 int mib[3];
32 int paxflags;
33 size_t len = sizeof(paxflags);
// Build the proc.<pid>.paxflags sysctl name for the current process.
35 mib[0] = CTL_PROC;
36 mib[1] = getpid();
37 mib[2] = PROC_PID_PAXFLAGS;
// A failing sysctl aborts the test binary rather than guessing.
39 if (sysctl(mib, 3, &paxflags, &len, NULL, 0) != 0)
40 err(EXIT_FAILURE, "sysctl");
// Normalize the flag bit to a bool.
42 return !!(paxflags & CTL_PROC_PAXFLAGS_MPROTECT)&#59;
43 #elif (defined(__APPLE__) && defined(__aarch64__)) || defined(__OpenBSD__)
44 return true;
45 #else
46 return false;
47 #endif
// Parameterized fixture for Memory::allocateMappedMemory and friends.
// The test parameter is a Memory::MF_* protection-flag combination
// (see the MemoryFlags array at the bottom of this file).
50 class MappedMemoryTest : public ::testing::TestWithParam<unsigned> {
51 public:
52 MappedMemoryTest() {
// Cache the flag combination under test and a page-size estimate used
// to construct "near" allocation hints.
53 Flags = GetParam();
54 PageSize = sys::Process::getPageSizeEstimate();
57 protected:
58 // Adds RW flags to permit testing of the resulting memory
59 unsigned getTestableEquivalent(unsigned RequestedFlags) {
60 switch (RequestedFlags) {
// Any readable/writable combination maps to plain RW.
61 case Memory::MF_READ:
62 case Memory::MF_WRITE:
63 case Memory::MF_READ|Memory::MF_WRITE:
64 return Memory::MF_READ|Memory::MF_WRITE;
// Any combination involving EXEC keeps EXEC and adds RW.
65 case Memory::MF_READ|Memory::MF_EXEC:
66 case Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC:
67 case Memory::MF_EXEC:
68 return Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC;
70 // Default in case values are added to the enum, as required by some compilers
71 return Memory::MF_READ|Memory::MF_WRITE;
74 // Returns true if the memory blocks overlap
75 bool doesOverlap(MemoryBlock M1, MemoryBlock M2) {
// Identical base addresses trivially overlap.
76 if (M1.base() == M2.base())
77 return true;
// Otherwise the lower block overlaps iff its end extends past the
// higher block's start.
79 if (M1.base() > M2.base())
80 return (unsigned char *)M2.base() + M2.allocatedSize() > M1.base();
82 return (unsigned char *)M1.base() + M1.allocatedSize() > M2.base();
// Protection flags for this instantiation of the parameterized suite.
85 unsigned Flags;
// Estimated system page size (from sys::Process::getPageSizeEstimate).
86 size_t PageSize;
89 // MPROTECT prevents W+X mmaps
90 #define CHECK_UNSUPPORTED() \
91 do { \
92 if ((Flags & Memory::MF_WRITE) && (Flags & Memory::MF_EXEC) && \
93 IsMPROTECT()) \
94 GTEST_SKIP(); \
95 } while (0)
// Allocates one small block with the parameterized flags and verifies it is
// non-null, at least as large as requested, and can be released.
97 TEST_P(MappedMemoryTest, AllocAndRelease) {
98 CHECK_UNSUPPORTED();
99 std::error_code EC;
100 MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC);
101 EXPECT_EQ(std::error_code(), EC);
103 EXPECT_NE((void*)nullptr, M1.base());
104 EXPECT_LE(sizeof(int), M1.allocatedSize());
// releaseMappedMemory returns a default (zero, falsy) error_code on success.
106 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
// Same as AllocAndRelease, but passes MF_HUGE_HINT to request large pages.
// The hint is advisory, so the assertions only require a valid allocation.
109 TEST_P(MappedMemoryTest, AllocAndReleaseHuge) {
110 CHECK_UNSUPPORTED();
111 std::error_code EC;
112 MemoryBlock M1 = Memory::allocateMappedMemory(
113 sizeof(int), nullptr, Flags | Memory::MF_HUGE_HINT, EC);
114 EXPECT_EQ(std::error_code(), EC);
116 // Test large/huge memory pages. In the worst case, 4kb pages should be
117 // returned, if large pages aren't available.
119 EXPECT_NE((void *)nullptr, M1.base());
120 EXPECT_LE(sizeof(int), M1.allocatedSize());
122 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
// Allocates three blocks of different sizes, verifies they do not overlap,
// and releases them out of allocation order, allocating a fourth block in
// between to exercise reuse of freed address space.
125 TEST_P(MappedMemoryTest, MultipleAllocAndRelease) {
126 CHECK_UNSUPPORTED();
127 std::error_code EC;
128 MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
129 EXPECT_EQ(std::error_code(), EC);
130 MemoryBlock M2 = Memory::allocateMappedMemory(64, nullptr, Flags, EC);
131 EXPECT_EQ(std::error_code(), EC);
132 MemoryBlock M3 = Memory::allocateMappedMemory(32, nullptr, Flags, EC);
133 EXPECT_EQ(std::error_code(), EC);
135 EXPECT_NE((void*)nullptr, M1.base());
136 EXPECT_LE(16U, M1.allocatedSize());
137 EXPECT_NE((void*)nullptr, M2.base());
138 EXPECT_LE(64U, M2.allocatedSize());
139 EXPECT_NE((void*)nullptr, M3.base());
140 EXPECT_LE(32U, M3.allocatedSize());
// Distinct allocations must occupy disjoint address ranges.
142 EXPECT_FALSE(doesOverlap(M1, M2));
143 EXPECT_FALSE(doesOverlap(M2, M3));
144 EXPECT_FALSE(doesOverlap(M1, M3));
// Release M1 and M3 first, then allocate M4 while M2 is still live.
146 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
147 EXPECT_FALSE(Memory::releaseMappedMemory(M3));
148 MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
149 EXPECT_EQ(std::error_code(), EC);
150 EXPECT_NE((void*)nullptr, M4.base());
151 EXPECT_LE(16U, M4.allocatedSize());
152 EXPECT_FALSE(Memory::releaseMappedMemory(M4));
153 EXPECT_FALSE(Memory::releaseMappedMemory(M2));
// Writes and reads back a single int through the mapping. Runs only when the
// flags include both READ and WRITE, or when Flags == 0 (platform default
// protection, which the condition below deliberately lets through).
156 TEST_P(MappedMemoryTest, BasicWrite) {
157 // This test applies only to readable and writeable combinations
158 if (Flags &&
159 !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
160 GTEST_SKIP();
161 CHECK_UNSUPPORTED();
163 std::error_code EC;
164 MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC);
165 EXPECT_EQ(std::error_code(), EC);
167 EXPECT_NE((void*)nullptr, M1.base());
168 EXPECT_LE(sizeof(int), M1.allocatedSize());
// Store through the mapping and read the value back.
170 int *a = (int*)M1.base();
171 *a = 1;
172 EXPECT_EQ(1, *a);
174 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
// Writes to three concurrently live mappings, releases two, allocates and
// writes a fourth, and finally verifies the surviving mapping's contents
// were not disturbed by the other activity. Skipped unless the flags are
// readable+writable (or 0, i.e. platform default protection).
177 TEST_P(MappedMemoryTest, MultipleWrite) {
178 // This test applies only to readable and writeable combinations
179 if (Flags &&
180 !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
181 GTEST_SKIP();
182 CHECK_UNSUPPORTED();
184 std::error_code EC;
185 MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
186 EC);
187 EXPECT_EQ(std::error_code(), EC);
188 MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
189 EC);
190 EXPECT_EQ(std::error_code(), EC);
191 MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
192 EC);
193 EXPECT_EQ(std::error_code(), EC);
195 EXPECT_FALSE(doesOverlap(M1, M2));
196 EXPECT_FALSE(doesOverlap(M2, M3));
197 EXPECT_FALSE(doesOverlap(M1, M3));
199 EXPECT_NE((void*)nullptr, M1.base());
200 EXPECT_LE(1U * sizeof(int), M1.allocatedSize());
201 EXPECT_NE((void*)nullptr, M2.base());
202 EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
203 EXPECT_NE((void*)nullptr, M3.base());
204 EXPECT_LE(4U * sizeof(int), M3.allocatedSize());
// Fill each mapping with distinct values.
206 int *x = (int*)M1.base();
207 *x = 1;
209 int *y = (int*)M2.base();
210 for (int i = 0; i < 8; i++) {
211 y[i] = i;
214 int *z = (int*)M3.base();
215 *z = 42;
217 EXPECT_EQ(1, *x);
218 EXPECT_EQ(7, y[7]);
219 EXPECT_EQ(42, *z);
// Release M1 and M3 while M2 stays live.
221 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
222 EXPECT_FALSE(Memory::releaseMappedMemory(M3));
// A fresh allocation may reuse the just-released address space.
224 MemoryBlock M4 = Memory::allocateMappedMemory(64 * sizeof(int), nullptr,
225 Flags, EC);
226 EXPECT_EQ(std::error_code(), EC);
227 EXPECT_NE((void*)nullptr, M4.base());
228 EXPECT_LE(64U * sizeof(int), M4.allocatedSize());
229 x = (int*)M4.base();
230 *x = 4;
231 EXPECT_EQ(4, *x);
232 EXPECT_FALSE(Memory::releaseMappedMemory(M4));
234 // Verify that M2 remains unaffected by other activity
235 for (int i = 0; i < 8; i++) {
236 EXPECT_EQ(i, y[i]);
238 EXPECT_FALSE(Memory::releaseMappedMemory(M2));
// Allocates with the parameterized flags, then upgrades each mapping to a
// readable/writable equivalent via protectMappedMemory (getTestableEquivalent
// always adds RW) so the contents can be written and verified regardless of
// the original protection.
241 TEST_P(MappedMemoryTest, EnabledWrite) {
242 // MPROTECT prevents W+X, and since this test always adds W we need
243 // to block any variant with X.
244 if ((Flags & Memory::MF_EXEC) && IsMPROTECT())
245 GTEST_SKIP();
247 std::error_code EC;
248 MemoryBlock M1 = Memory::allocateMappedMemory(2 * sizeof(int), nullptr, Flags,
249 EC);
250 EXPECT_EQ(std::error_code(), EC);
251 MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
252 EC);
253 EXPECT_EQ(std::error_code(), EC);
254 MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
255 EC);
256 EXPECT_EQ(std::error_code(), EC);
258 EXPECT_NE((void*)nullptr, M1.base());
259 EXPECT_LE(2U * sizeof(int), M1.allocatedSize());
260 EXPECT_NE((void*)nullptr, M2.base());
261 EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
262 EXPECT_NE((void*)nullptr, M3.base());
263 EXPECT_LE(4U * sizeof(int), M3.allocatedSize());
// protectMappedMemory returns a zero (falsy) error_code on success.
265 EXPECT_FALSE(Memory::protectMappedMemory(M1, getTestableEquivalent(Flags)));
266 EXPECT_FALSE(Memory::protectMappedMemory(M2, getTestableEquivalent(Flags)));
267 EXPECT_FALSE(Memory::protectMappedMemory(M3, getTestableEquivalent(Flags)));
269 EXPECT_FALSE(doesOverlap(M1, M2));
270 EXPECT_FALSE(doesOverlap(M2, M3));
271 EXPECT_FALSE(doesOverlap(M1, M3));
// The mappings are now RW-capable, so writes must succeed and read back.
273 int *x = (int*)M1.base();
274 *x = 1;
275 int *y = (int*)M2.base();
276 for (unsigned int i = 0; i < 8; i++) {
277 y[i] = i;
279 int *z = (int*)M3.base();
280 *z = 42;
282 EXPECT_EQ(1, *x);
283 EXPECT_EQ(7, y[7]);
284 EXPECT_EQ(42, *z);
286 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
287 EXPECT_FALSE(Memory::releaseMappedMemory(M3));
// M2 must survive the release of its neighbors.
288 EXPECT_EQ(6, y[6]);
290 MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
291 EXPECT_EQ(std::error_code(), EC);
292 EXPECT_NE((void*)nullptr, M4.base());
293 EXPECT_LE(16U, M4.allocatedSize());
294 EXPECT_EQ(std::error_code(),
295 Memory::protectMappedMemory(M4, getTestableEquivalent(Flags)));
296 x = (int*)M4.base();
297 *x = 4;
298 EXPECT_EQ(4, *x);
299 EXPECT_FALSE(Memory::releaseMappedMemory(M4));
300 EXPECT_FALSE(Memory::releaseMappedMemory(M2));
// Chains "near" hints: each allocation passes the previous block as the
// placement hint. The hint is advisory, so only validity and non-overlap
// are asserted.
303 TEST_P(MappedMemoryTest, SuccessiveNear) {
304 CHECK_UNSUPPORTED();
305 std::error_code EC;
306 MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
307 EXPECT_EQ(std::error_code(), EC);
308 MemoryBlock M2 = Memory::allocateMappedMemory(64, &M1, Flags, EC);
309 EXPECT_EQ(std::error_code(), EC);
310 MemoryBlock M3 = Memory::allocateMappedMemory(32, &M2, Flags, EC);
311 EXPECT_EQ(std::error_code(), EC);
313 EXPECT_NE((void*)nullptr, M1.base());
314 EXPECT_LE(16U, M1.allocatedSize());
315 EXPECT_NE((void*)nullptr, M2.base());
316 EXPECT_LE(64U, M2.allocatedSize());
317 EXPECT_NE((void*)nullptr, M3.base());
318 EXPECT_LE(32U, M3.allocatedSize());
320 EXPECT_FALSE(doesOverlap(M1, M2));
321 EXPECT_FALSE(doesOverlap(M2, M3));
322 EXPECT_FALSE(doesOverlap(M1, M3));
324 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
325 EXPECT_FALSE(Memory::releaseMappedMemory(M3));
326 EXPECT_FALSE(Memory::releaseMappedMemory(M2));
// Passes the same fixed "near" hint (3*PageSize) to three allocations.
// Note this test intentionally omits overlap checks; it only verifies each
// allocation succeeds and meets its size request.
329 TEST_P(MappedMemoryTest, DuplicateNear) {
330 CHECK_UNSUPPORTED();
331 std::error_code EC;
332 MemoryBlock Near((void*)(3*PageSize), 16);
333 MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
334 EXPECT_EQ(std::error_code(), EC);
335 MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
336 EXPECT_EQ(std::error_code(), EC);
337 MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
338 EXPECT_EQ(std::error_code(), EC);
340 EXPECT_NE((void*)nullptr, M1.base());
341 EXPECT_LE(16U, M1.allocatedSize());
342 EXPECT_NE((void*)nullptr, M2.base());
343 EXPECT_LE(64U, M2.allocatedSize());
344 EXPECT_NE((void*)nullptr, M3.base());
345 EXPECT_LE(32U, M3.allocatedSize());
347 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
348 EXPECT_FALSE(Memory::releaseMappedMemory(M3));
349 EXPECT_FALSE(Memory::releaseMappedMemory(M2));
// Uses a null, zero-sized MemoryBlock as the "near" hint: allocations must
// still succeed and not overlap, i.e. a degenerate hint behaves like no hint.
352 TEST_P(MappedMemoryTest, ZeroNear) {
353 CHECK_UNSUPPORTED();
354 std::error_code EC;
355 MemoryBlock Near(nullptr, 0);
356 MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
357 EXPECT_EQ(std::error_code(), EC);
358 MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
359 EXPECT_EQ(std::error_code(), EC);
360 MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
361 EXPECT_EQ(std::error_code(), EC);
363 EXPECT_NE((void*)nullptr, M1.base());
364 EXPECT_LE(16U, M1.allocatedSize());
365 EXPECT_NE((void*)nullptr, M2.base());
366 EXPECT_LE(64U, M2.allocatedSize());
367 EXPECT_NE((void*)nullptr, M3.base());
368 EXPECT_LE(32U, M3.allocatedSize());
370 EXPECT_FALSE(doesOverlap(M1, M2));
371 EXPECT_FALSE(doesOverlap(M2, M3));
372 EXPECT_FALSE(doesOverlap(M1, M3));
374 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
375 EXPECT_FALSE(Memory::releaseMappedMemory(M3));
376 EXPECT_FALSE(Memory::releaseMappedMemory(M2));
// Uses a hint with a valid address (4*PageSize) but zero size: allocations
// must still succeed and remain disjoint.
379 TEST_P(MappedMemoryTest, ZeroSizeNear) {
380 CHECK_UNSUPPORTED();
381 std::error_code EC;
382 MemoryBlock Near((void*)(4*PageSize), 0);
383 MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
384 EXPECT_EQ(std::error_code(), EC);
385 MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
386 EXPECT_EQ(std::error_code(), EC);
387 MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
388 EXPECT_EQ(std::error_code(), EC);
390 EXPECT_NE((void*)nullptr, M1.base());
391 EXPECT_LE(16U, M1.allocatedSize());
392 EXPECT_NE((void*)nullptr, M2.base());
393 EXPECT_LE(64U, M2.allocatedSize());
394 EXPECT_NE((void*)nullptr, M3.base());
395 EXPECT_LE(32U, M3.allocatedSize());
397 EXPECT_FALSE(doesOverlap(M1, M2));
398 EXPECT_FALSE(doesOverlap(M2, M3));
399 EXPECT_FALSE(doesOverlap(M1, M3));
401 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
402 EXPECT_FALSE(Memory::releaseMappedMemory(M3));
403 EXPECT_FALSE(Memory::releaseMappedMemory(M2));
// Uses a deliberately page-unaligned hint address (2*PageSize + 5) and a
// non-page-multiple size (15): the allocator must still produce a valid,
// sufficiently large mapping.
406 TEST_P(MappedMemoryTest, UnalignedNear) {
407 CHECK_UNSUPPORTED();
408 std::error_code EC;
409 MemoryBlock Near((void*)(2*PageSize+5), 0);
410 MemoryBlock M1 = Memory::allocateMappedMemory(15, &Near, Flags, EC);
411 EXPECT_EQ(std::error_code(), EC);
413 EXPECT_NE((void*)nullptr, M1.base());
414 EXPECT_LE(sizeof(int), M1.allocatedSize());
416 EXPECT_FALSE(Memory::releaseMappedMemory(M1));
419 // Note that Memory::MF_WRITE is not supported exclusively across
420 // operating systems and architectures and can imply MF_READ|MF_WRITE
// Every protection-flag combination the suite is instantiated with; each
// MappedMemoryTest above runs once per entry.
421 unsigned MemoryFlags[] = {
422 Memory::MF_READ,
423 Memory::MF_WRITE,
424 Memory::MF_READ|Memory::MF_WRITE,
425 Memory::MF_EXEC,
426 Memory::MF_READ|Memory::MF_EXEC,
427 Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC
430 INSTANTIATE_TEST_SUITE_P(AllocationTests, MappedMemoryTest,
431 ::testing::ValuesIn(MemoryFlags));
433 } // anonymous namespace