//===- llvm/unittest/Support/MemoryTest.cpp - Mapped memory tests --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Support/Memory.h"
#include "llvm/Support/Process.h"
#include "gtest/gtest.h"
#include <cassert>
#include <cstdlib>

#if defined(__NetBSD__)
// clang-format off
#include <sys/param.h>
#include <sys/types.h>
#include <sys/sysctl.h>
#include <err.h>
#include <unistd.h>
// clang-format on
#endif

using namespace llvm;
using namespace sys;

namespace {

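// Returns true when NetBSD's PaX MPROTECT restriction is active for this
// process. Under MPROTECT the kernel refuses writable+executable mappings,
// so the W+X test variants below have to be skipped. On every other platform
// this is always false.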
bool IsMPROTECT() {
#if defined(__NetBSD__)
  int mib[3];
  int paxflags;
  size_t len = sizeof(paxflags);

  mib[0] = CTL_PROC;
  mib[1] = getpid();
  mib[2] = PROC_PID_PAXFLAGS;

  if (sysctl(mib, 3, &paxflags, &len, NULL, 0) != 0)
    err(EXIT_FAILURE, "sysctl");

  return !!(paxflags & CTL_PROC_PAXFLAGS_MPROTECT);
#else
  return false;
#endif
}

class MappedMemoryTest : public ::testing::TestWithParam<unsigned> {
public:
  MappedMemoryTest() {
    Flags = GetParam();
    PageSize = sys::Process::getPageSizeEstimate();
  }

protected:
  // Adds RW flags to permit testing of the resulting memory
  unsigned getTestableEquivalent(unsigned RequestedFlags) {
    switch (RequestedFlags) {
    case Memory::MF_READ:
    case Memory::MF_WRITE:
    case Memory::MF_READ|Memory::MF_WRITE:
      return Memory::MF_READ|Memory::MF_WRITE;
    case Memory::MF_READ|Memory::MF_EXEC:
    case Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC:
    case Memory::MF_EXEC:
      return Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC;
    }

    // Default in case values are added to the enum, as required by some
    // compilers.
    return Memory::MF_READ|Memory::MF_WRITE;
  }

  // Returns true if the memory blocks overlap
  bool doesOverlap(MemoryBlock M1, MemoryBlock M2) {
    if (M1.base() == M2.base())
      return true;

    if (M1.base() > M2.base())
      return (unsigned char *)M2.base() + M2.allocatedSize() > M1.base();

    return (unsigned char *)M1.base() + M1.allocatedSize() > M2.base();
  }

  unsigned Flags;
  size_t PageSize;
};

// MPROTECT prevents W+X mmaps
#define CHECK_UNSUPPORTED()                                                    \
  do {                                                                         \
    if ((Flags & Memory::MF_WRITE) && (Flags & Memory::MF_EXEC) &&             \
        IsMPROTECT())                                                          \
      return;                                                                  \
  } while (0)

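// Each test below starts with CHECK_UNSUPPORTED(): when the parameterized
// flag set asks for a writable and executable mapping on a PaX MPROTECT
// system, the test body returns early instead of failing inside mmap.
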
TEST_P(MappedMemoryTest, AllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, AllocAndReleaseHuge) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(
      sizeof(int), nullptr, Flags | Memory::MF_HUGE_HINT, EC);
  EXPECT_EQ(std::error_code(), EC);

  // Test large/huge memory pages. In the worst case, plain 4kb pages should
  // be returned if large pages aren't available.

  EXPECT_NE((void *)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleAllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.allocatedSize());
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, BasicWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    return;
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  int *a = (int*)M1.base();
  *a = 1;
  EXPECT_EQ(1, *a);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    return;
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(1U * sizeof(int), M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.allocatedSize());

  int *x = (int*)M1.base();
  *x = 1;

  int *y = (int*)M2.base();
  for (int i = 0; i < 8; i++) {
    y[i] = i;
  }

  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));

  MemoryBlock M4 = Memory::allocateMappedMemory(64 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(64U * sizeof(int), M4.allocatedSize());
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));

  // Verify that M2 remains unaffected by other activity
  for (int i = 0; i < 8; i++) {
    EXPECT_EQ(i, y[i]);
  }
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

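// EnabledWrite allocates with the parameterized flags and then uses
// protectMappedMemory() to upgrade each block to a testable (at least
// readable and writable) protection before touching its contents.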
TEST_P(MappedMemoryTest, EnabledWrite) {
  // MPROTECT prevents W+X, and since this test always adds W we need
  // to block any variant with X.
  if ((Flags & Memory::MF_EXEC) && IsMPROTECT())
    return;

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(2 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(2U * sizeof(int), M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.allocatedSize());

  EXPECT_FALSE(Memory::protectMappedMemory(M1, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M2, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M3, getTestableEquivalent(Flags)));

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  int *x = (int*)M1.base();
  *x = 1;
  int *y = (int*)M2.base();
  for (unsigned int i = 0; i < 8; i++) {
    y[i] = i;
  }
  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_EQ(6, y[6]);

  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.allocatedSize());
  EXPECT_EQ(std::error_code(),
            Memory::protectMappedMemory(M4, getTestableEquivalent(Flags)));
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

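// The *Near tests exercise the "near block" hint to allocateMappedMemory.
// The hint is only advisory, so these tests check that allocations still
// succeed (and, where applicable, do not overlap) even when the requested
// neighborhood is arbitrary, null, zero-sized, or unaligned.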
TEST_P(MappedMemoryTest, SuccessiveNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &M1, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &M2, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, DuplicateNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(3*PageSize), 16);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near(nullptr, 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroSizeNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(4*PageSize), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, UnalignedNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(2*PageSize+5), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(15, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

// Note that Memory::MF_WRITE is not supported exclusively across
// operating systems and architectures and can imply MF_READ|MF_WRITE
unsigned MemoryFlags[] = {
    Memory::MF_READ,
    Memory::MF_WRITE,
    Memory::MF_READ|Memory::MF_WRITE,
    Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC
};

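// Every TEST_P above is instantiated once per entry in MemoryFlags. The
// trailing comma in INSTANTIATE_TEST_CASE_P below is intentional; it passes
// an empty optional argument to the variadic macro.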
INSTANTIATE_TEST_CASE_P(AllocationTests,
                        MappedMemoryTest,
                        ::testing::ValuesIn(MemoryFlags),);

} // anonymous namespace