// Extraction residue from a git web viewer, preserved as comments:
// Revert "[InstCombine] Support gep nuw in icmp folds" (#118698)
// llvm-project.git / compiler-rt / lib / sanitizer_common / tests / sanitizer_stack_store_test.cpp
// blob 57be1c9b7f186425f550b5a7e0ed2b7013edd253
1 //===-- sanitizer_stack_store_test.cpp --------------------------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 #include "sanitizer_common/sanitizer_stack_store.h"
10 #include <algorithm>
11 #include <numeric>
12 #include <vector>
14 #include "gtest/gtest.h"
15 #include "sanitizer_atomic.h"
16 #include "sanitizer_hash.h"
17 #include "sanitizer_stacktrace.h"
19 namespace __sanitizer {
// Test fixture for StackStore.  It reads StackStore's private members
// (total_frames_, blocks_, IdToOffset) directly — presumably the fixture is
// declared a friend of StackStore; confirm against sanitizer_stack_store.h.
class StackStoreTest : public testing::Test {
 protected:
  void SetUp() override {}
  // Release all memory owned by the store after every test so each test
  // starts (and leak checking ends) with a clean store.
  void TearDown() override { store_.TestOnlyUnmap(); }

  // Invokes `fn` with up to `n` pseudo-random stack traces.  The sequence is
  // fully deterministic (MurMur2 hash seeded with 0 drives size and tag), so
  // two calls with the same `n` replay exactly the same traces — tests rely
  // on this to store traces in one pass and verify them in a second pass.
  // Stops early if a gtest expectation has already failed.
  template <typename Fn>
  void ForEachTrace(Fn fn, uptr n = 1000000) {
    std::vector<uptr> frames(kStackTraceMax);
    std::iota(frames.begin(), frames.end(), 0x100000);
    MurMur2HashBuilder h(0);
    for (uptr i = 0; i < n; ++i) {
      h.add(i);
      u32 size = h.get() % kStackTraceMax;
      h.add(i);
      uptr tag = h.get() % 256;
      StackTrace s(frames.data(), size, tag);
      // Skip the all-empty trace: an empty, untagged trace is the one input
      // the store treats specially (see the Empty test), so callbacks only
      // ever see traces that carry data.
      if (!s.size && !s.tag)
        continue;
      fn(s);
      if (HasFailure())
        return;
      // Vary frame contents between iterations without reallocating.
      std::next_permutation(frames.begin(), frames.end());
    }
  }

  using BlockInfo = StackStore::BlockInfo;

  // Total number of frames ever written into the store (private counter).
  uptr GetTotalFramesCount() const {
    return atomic_load_relaxed(&store_.total_frames_);
  }

  // Counts blocks reporting ready-to-pack via Stored(0) — presumably a
  // query for "all frames of this block are stored"; verify against
  // StackStore::BlockInfo.
  uptr CountReadyToPackBlocks() {
    uptr res = 0;
    for (BlockInfo& b : store_.blocks_) res += b.Stored(0);
    return res;
  }

  // Counts blocks currently held in compressed (packed) form.
  uptr CountPackedBlocks() const {
    uptr res = 0;
    for (const BlockInfo& b : store_.blocks_) res += b.IsPacked();
    return res;
  }

  // Translates a stack id into its frame offset within the store.
  uptr IdToOffset(StackStore::Id id) const { return store_.IdToOffset(id); }

  static constexpr uptr kBlockSizeFrames = StackStore::kBlockSizeFrames;
  static constexpr uptr kBlockSizeBytes = StackStore::kBlockSizeBytes;

  StackStore store_ = {};
};
72 TEST_F(StackStoreTest, Empty) {
73 uptr before = store_.Allocated();
74 uptr pack = 0;
75 EXPECT_EQ(0u, store_.Store({}, &pack));
76 uptr after = store_.Allocated();
77 EXPECT_EQ(before, after);
80 TEST_F(StackStoreTest, Basic) {
81 std::vector<StackStore::Id> ids;
82 ForEachTrace([&](const StackTrace& s) {
83 uptr pack = 0;
84 ids.push_back(store_.Store(s, &pack));
85 });
87 auto id = ids.begin();
88 ForEachTrace([&](const StackTrace& s) {
89 StackTrace trace = store_.Load(*(id++));
90 EXPECT_EQ(s.size, trace.size);
91 EXPECT_EQ(s.tag, trace.tag);
92 EXPECT_EQ(std::vector<uptr>(s.trace, s.trace + s.size),
93 std::vector<uptr>(trace.trace, trace.trace + trace.size));
94 });
// Checks the store's memory accounting: near-zero when fresh, within an
// expected envelope after a million traces, and near-zero again after unmap.
TEST_F(StackStoreTest, Allocated) {
  EXPECT_LE(store_.Allocated(), 0x100000u);
  std::vector<StackStore::Id> ids;
  ForEachTrace([&](const StackTrace& s) {
    uptr pack = 0;
    ids.push_back(store_.Store(s, &pack));
  });
  // Expected footprint ~0.5 GB on 32-bit / ~1 GB on 64-bit, with a 10%
  // tolerance (frames are uptr-sized, hence the platform split).
  EXPECT_NEAR(store_.Allocated(), FIRST_32_SECOND_64(500000000u, 1000000000u),
              FIRST_32_SECOND_64(50000000u, 100000000u));
  store_.TestOnlyUnmap();
  EXPECT_LE(store_.Allocated(), 0x100000u);
}
// Verifies that Store() raises the `pack` out-flag exactly when a write
// crosses a block boundary, i.e. once per kBlockSizeFrames frames stored.
TEST_F(StackStoreTest, ReadyToPack) {
  uptr next_pack = kBlockSizeFrames;
  uptr total_ready = 0;
  ForEachTrace(
      [&](const StackTrace& s) {
        uptr pack = 0;
        StackStore::Id id = store_.Store(s, &pack);
        // One extra slot per trace: the store writes a size/tag header frame
        // in addition to the s.size trace frames — TODO confirm against
        // StackStore::Store.
        uptr end_idx = IdToOffset(id) + 1 + s.size;
        if (end_idx >= next_pack) {
          // This store filled (or crossed) a block boundary: exactly one new
          // block became ready to pack.
          EXPECT_EQ(1u, pack);
          next_pack += kBlockSizeFrames;
        } else {
          EXPECT_EQ(0u, pack);
        }
        total_ready += pack;
        EXPECT_EQ(CountReadyToPackBlocks(), total_ready);
      },
      100000);
  // Every fully filled block — and only those — must have been signalled.
  EXPECT_EQ(GetTotalFramesCount() / kBlockSizeFrames, total_ready);
}
// Parameterized fixture for the packing tests.  Each parameter is a pair of
// (compression algorithm, minimal expected compression ratio).
struct StackStorePackTest : public StackStoreTest,
                            public ::testing::WithParamInterface<
                                std::pair<StackStore::Compression, uptr>> {};
// Run the pack/unpack tests for both supported compression schemes.  The
// expected minimal ratios differ per target word size (FIRST_32_SECOND_64);
// presumably wider 64-bit frames compress differently — TODO confirm the
// numbers against the compressors in sanitizer_stack_store.cpp.
INSTANTIATE_TEST_SUITE_P(
    PackUnpacks, StackStorePackTest,
    ::testing::ValuesIn({
        StackStorePackTest::ParamType(StackStore::Compression::Delta,
                                      FIRST_32_SECOND_64(2, 6)),
        StackStorePackTest::ParamType(StackStore::Compression::LZW,
                                      FIRST_32_SECOND_64(60, 125)),
    }));
// End-to-end pack/unpack cycle: pack each block as it fills up, check the
// claimed savings and compression ratio, then verify Load() transparently
// unpacks and returns the original traces.
TEST_P(StackStorePackTest, PackUnpack) {
  std::vector<StackStore::Id> ids;
  StackStore::Compression type = GetParam().first;
  uptr expected_ratio = GetParam().second;
  ForEachTrace([&](const StackTrace& s) {
    uptr pack = 0;
    ids.push_back(store_.Store(s, &pack));
    if (pack) {
      // A block just became ready: pack it and check the accounting.
      uptr before = store_.Allocated();
      uptr diff = store_.Pack(type);
      uptr after = store_.Allocated();
      // Pack() must report exactly the memory it released.
      EXPECT_EQ(before - after, diff);
      EXPECT_LT(after, before);
      // Achieved ratio (original block size over compressed remainder) must
      // meet the per-algorithm minimum from the test parameter.
      EXPECT_GE(kBlockSizeBytes / (kBlockSizeBytes - (before - after)),
                expected_ratio);
    }
  });

  uptr packed_blocks = CountPackedBlocks();
  // Unpack random block.
  store_.Load(kBlockSizeFrames * 7 + 123);
  // Loading an id from a packed block must unpack exactly that one block.
  EXPECT_EQ(packed_blocks - 1, CountPackedBlocks());

  // Unpack all blocks.
  auto id = ids.begin();
  ForEachTrace([&](const StackTrace& s) {
    StackTrace trace = store_.Load(*(id++));
    EXPECT_EQ(s.size, trace.size);
    EXPECT_EQ(s.tag, trace.tag);
    EXPECT_EQ(std::vector<uptr>(s.trace, s.trace + s.size),
              std::vector<uptr>(trace.trace, trace.trace + trace.size));
  });
  EXPECT_EQ(0u, CountPackedBlocks());

  // With everything already unpacked, a further Pack() has nothing to do.
  EXPECT_EQ(0u, store_.Pack(type));
  EXPECT_EQ(0u, CountPackedBlocks());
}
// Feeds the store hash-generated (effectively incompressible) frames and
// checks that Pack() declines to keep blocks it cannot shrink: it reports
// zero savings and leaves no block in packed state.
TEST_P(StackStorePackTest, Failed) {
  MurMur2Hash64Builder h(0);
  StackStore::Compression type = GetParam().first;
  std::vector<uptr> frames(200);
  // Store enough 200-frame traces to fill four blocks.
  for (uptr i = 0; i < kBlockSizeFrames * 4 / frames.size(); ++i) {
    for (uptr& f : frames) {
      h.add(1);
      // Make it difficult to pack.
      f = h.get();
    }
    uptr pack = 0;
    store_.Store(StackTrace(frames.data(), frames.size()), &pack);
    if (pack)
      // Incompressible data: packing must report zero bytes saved.
      EXPECT_EQ(0u, store_.Pack(type));
  }
  EXPECT_EQ(0u, CountPackedBlocks());
}
200 } // namespace __sanitizer