//===-- sanitizer_stack_store_test.cpp --------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_stack_store.h"

#include <algorithm>
#include <numeric>
#include <vector>

#include "gtest/gtest.h"
#include "sanitizer_atomic.h"
#include "sanitizer_hash.h"
#include "sanitizer_stacktrace.h"
19 namespace __sanitizer
{
21 class StackStoreTest
: public testing::Test
{
23 void SetUp() override
{}
24 void TearDown() override
{ store_
.TestOnlyUnmap(); }
26 template <typename Fn
>
27 void ForEachTrace(Fn fn
, uptr n
= 1000000) {
28 std::vector
<uptr
> frames(kStackTraceMax
);
29 std::iota(frames
.begin(), frames
.end(), 0x100000);
30 MurMur2HashBuilder
h(0);
31 for (uptr i
= 0; i
< n
; ++i
) {
33 u32 size
= h
.get() % kStackTraceMax
;
35 uptr tag
= h
.get() % 256;
36 StackTrace
s(frames
.data(), size
, tag
);
37 if (!s
.size
&& !s
.tag
)
42 std::next_permutation(frames
.begin(), frames
.end());
46 using BlockInfo
= StackStore::BlockInfo
;
48 uptr
GetTotalFramesCount() const {
49 return atomic_load_relaxed(&store_
.total_frames_
);
52 uptr
CountReadyToPackBlocks() {
54 for (BlockInfo
& b
: store_
.blocks_
) res
+= b
.Stored(0);
58 uptr
CountPackedBlocks() const {
60 for (const BlockInfo
& b
: store_
.blocks_
) res
+= b
.IsPacked();
64 uptr
IdToOffset(StackStore::Id id
) const { return store_
.IdToOffset(id
); }
66 static constexpr uptr kBlockSizeFrames
= StackStore::kBlockSizeFrames
;
67 static constexpr uptr kBlockSizeBytes
= StackStore::kBlockSizeBytes
;
69 StackStore store_
= {};
72 TEST_F(StackStoreTest
, Empty
) {
73 uptr before
= store_
.Allocated();
75 EXPECT_EQ(0u, store_
.Store({}, &pack
));
76 uptr after
= store_
.Allocated();
77 EXPECT_EQ(before
, after
);
80 TEST_F(StackStoreTest
, Basic
) {
81 std::vector
<StackStore::Id
> ids
;
82 ForEachTrace([&](const StackTrace
& s
) {
84 ids
.push_back(store_
.Store(s
, &pack
));
87 auto id
= ids
.begin();
88 ForEachTrace([&](const StackTrace
& s
) {
89 StackTrace trace
= store_
.Load(*(id
++));
90 EXPECT_EQ(s
.size
, trace
.size
);
91 EXPECT_EQ(s
.tag
, trace
.tag
);
92 EXPECT_EQ(std::vector
<uptr
>(s
.trace
, s
.trace
+ s
.size
),
93 std::vector
<uptr
>(trace
.trace
, trace
.trace
+ trace
.size
));
97 TEST_F(StackStoreTest
, Allocated
) {
98 EXPECT_LE(store_
.Allocated(), 0x100000u
);
99 std::vector
<StackStore::Id
> ids
;
100 ForEachTrace([&](const StackTrace
& s
) {
102 ids
.push_back(store_
.Store(s
, &pack
));
104 EXPECT_NEAR(store_
.Allocated(), FIRST_32_SECOND_64(500000000u, 1000000000u),
105 FIRST_32_SECOND_64(50000000u, 100000000u));
106 store_
.TestOnlyUnmap();
107 EXPECT_LE(store_
.Allocated(), 0x100000u
);
110 TEST_F(StackStoreTest
, ReadyToPack
) {
111 uptr next_pack
= kBlockSizeFrames
;
112 uptr total_ready
= 0;
114 [&](const StackTrace
& s
) {
116 StackStore::Id id
= store_
.Store(s
, &pack
);
117 uptr end_idx
= IdToOffset(id
) + 1 + s
.size
;
118 if (end_idx
>= next_pack
) {
120 next_pack
+= kBlockSizeFrames
;
125 EXPECT_EQ(CountReadyToPackBlocks(), total_ready
);
128 EXPECT_EQ(GetTotalFramesCount() / kBlockSizeFrames
, total_ready
);
131 struct StackStorePackTest
: public StackStoreTest
,
132 public ::testing::WithParamInterface
<
133 std::pair
<StackStore::Compression
, uptr
>> {};
135 INSTANTIATE_TEST_SUITE_P(
136 PackUnpacks
, StackStorePackTest
,
137 ::testing::ValuesIn({
138 StackStorePackTest::ParamType(StackStore::Compression::Delta
,
139 FIRST_32_SECOND_64(2, 6)),
140 StackStorePackTest::ParamType(StackStore::Compression::LZW
,
141 FIRST_32_SECOND_64(60, 125)),
144 TEST_P(StackStorePackTest
, PackUnpack
) {
145 std::vector
<StackStore::Id
> ids
;
146 StackStore::Compression type
= GetParam().first
;
147 uptr expected_ratio
= GetParam().second
;
148 ForEachTrace([&](const StackTrace
& s
) {
150 ids
.push_back(store_
.Store(s
, &pack
));
152 uptr before
= store_
.Allocated();
153 uptr diff
= store_
.Pack(type
);
154 uptr after
= store_
.Allocated();
155 EXPECT_EQ(before
- after
, diff
);
156 EXPECT_LT(after
, before
);
157 EXPECT_GE(kBlockSizeBytes
/ (kBlockSizeBytes
- (before
- after
)),
161 uptr packed_blocks
= CountPackedBlocks();
162 // Unpack random block.
163 store_
.Load(kBlockSizeFrames
* 7 + 123);
164 EXPECT_EQ(packed_blocks
- 1, CountPackedBlocks());
166 // Unpack all blocks.
167 auto id
= ids
.begin();
168 ForEachTrace([&](const StackTrace
& s
) {
169 StackTrace trace
= store_
.Load(*(id
++));
170 EXPECT_EQ(s
.size
, trace
.size
);
171 EXPECT_EQ(s
.tag
, trace
.tag
);
172 EXPECT_EQ(std::vector
<uptr
>(s
.trace
, s
.trace
+ s
.size
),
173 std::vector
<uptr
>(trace
.trace
, trace
.trace
+ trace
.size
));
175 EXPECT_EQ(0u, CountPackedBlocks());
177 EXPECT_EQ(0u, store_
.Pack(type
));
178 EXPECT_EQ(0u, CountPackedBlocks());
181 TEST_P(StackStorePackTest
, Failed
) {
182 MurMur2Hash64Builder
h(0);
183 StackStore::Compression type
= GetParam().first
;
184 std::vector
<uptr
> frames(200);
185 for (uptr i
= 0; i
< kBlockSizeFrames
* 4 / frames
.size(); ++i
) {
186 for (uptr
& f
: frames
) {
188 // Make it difficult to pack.
192 store_
.Store(StackTrace(frames
.data(), frames
.size()), &pack
);
194 EXPECT_EQ(0u, store_
.Pack(type
));
197 EXPECT_EQ(0u, CountPackedBlocks());
200 } // namespace __sanitizer