// Source: llvm-project, compiler-rt/lib/sanitizer_common/sanitizer_stack_store.h
// (captured from web git viewer; commit "[Reland][Runtimes] Merge
// 'compile_commands.json' files from runtimes build (#116303)",
// blob 4f1a8caac6ed85c5aff8a5967c7e29df3b4d5ad3)
1 //===-- sanitizer_stack_store.h ---------------------------------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
9 #ifndef SANITIZER_STACK_STORE_H
10 #define SANITIZER_STACK_STORE_H
12 #include "sanitizer_atomic.h"
13 #include "sanitizer_common.h"
14 #include "sanitizer_internal_defs.h"
15 #include "sanitizer_mutex.h"
16 #include "sanitizer_stacktrace.h"
18 namespace __sanitizer {
20 class StackStore {
21 static constexpr uptr kBlockSizeFrames = 0x100000;
22 static constexpr uptr kBlockCount = 0x1000;
23 static constexpr uptr kBlockSizeBytes = kBlockSizeFrames * sizeof(uptr);
25 public:
26 enum class Compression : u8 {
27 None = 0,
28 Delta,
29 LZW,
32 constexpr StackStore() = default;
34 using Id = u32; // Enough for 2^32 * sizeof(uptr) bytes of traces.
35 static_assert(u64(kBlockCount) * kBlockSizeFrames == 1ull << (sizeof(Id) * 8),
36 "");
38 Id Store(const StackTrace &trace,
39 uptr *pack /* number of blocks completed by this call */);
40 StackTrace Load(Id id);
41 uptr Allocated() const;
43 // Packs all blocks which don't expect any more writes. A block is going to be
44 // packed once. As soon trace from that block was requested, it will unpack
45 // and stay unpacked after that.
46 // Returns the number of released bytes.
47 uptr Pack(Compression type);
49 void LockAll();
50 void UnlockAll();
52 void TestOnlyUnmap();
54 private:
55 friend class StackStoreTest;
56 static constexpr uptr GetBlockIdx(uptr frame_idx) {
57 return frame_idx / kBlockSizeFrames;
60 static constexpr uptr GetInBlockIdx(uptr frame_idx) {
61 return frame_idx % kBlockSizeFrames;
64 static constexpr uptr IdToOffset(Id id) {
65 CHECK_NE(id, 0);
66 return id - 1; // Avoid zero as id.
69 static constexpr uptr OffsetToId(Id id) {
70 // This makes UINT32_MAX to 0 and it will be retrived as and empty stack.
71 // But this is not a problem as we will not be able to store anything after
72 // that anyway.
73 return id + 1; // Avoid zero as id.
76 uptr *Alloc(uptr count, uptr *idx, uptr *pack);
78 void *Map(uptr size, const char *mem_type);
79 void Unmap(void *addr, uptr size);
81 // Total number of allocated frames.
82 atomic_uintptr_t total_frames_ = {};
84 // Tracks total allocated memory in bytes.
85 atomic_uintptr_t allocated_ = {};
87 // Each block will hold pointer to exactly kBlockSizeFrames.
88 class BlockInfo {
89 atomic_uintptr_t data_;
90 // Counter to track store progress to know when we can Pack() the block.
91 atomic_uint32_t stored_;
92 // Protects alloc of new blocks.
93 mutable StaticSpinMutex mtx_;
95 enum class State : u8 {
96 Storing = 0,
97 Packed,
98 Unpacked,
100 State state SANITIZER_GUARDED_BY(mtx_);
102 uptr *Create(StackStore *store);
104 public:
105 uptr *Get() const;
106 uptr *GetOrCreate(StackStore *store);
107 uptr *GetOrUnpack(StackStore *store);
108 uptr Pack(Compression type, StackStore *store);
109 void TestOnlyUnmap(StackStore *store);
110 bool Stored(uptr n);
111 bool IsPacked() const;
112 void Lock() SANITIZER_NO_THREAD_SAFETY_ANALYSIS { mtx_.Lock(); }
113 void Unlock() SANITIZER_NO_THREAD_SAFETY_ANALYSIS { mtx_.Unlock(); }
116 BlockInfo blocks_[kBlockCount] = {};
119 } // namespace __sanitizer
121 #endif // SANITIZER_STACK_STORE_H