compiler-rt/lib/msan/msan.h (llvm-project.git)
//===-- msan.h --------------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
// Private MSan header.
//===----------------------------------------------------------------------===//

#ifndef MSAN_H
#define MSAN_H

#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "msan_interface_internal.h"
#include "msan_flags.h"
#include "ubsan/ubsan_platform.h"

#ifndef MSAN_REPLACE_OPERATORS_NEW_AND_DELETE
# define MSAN_REPLACE_OPERATORS_NEW_AND_DELETE 1
#endif

#ifndef MSAN_CONTAINS_UBSAN
# define MSAN_CONTAINS_UBSAN CAN_SANITIZE_UB
#endif

struct MappingDesc {
  uptr start;
  uptr end;
  enum Type {
    INVALID, APP, SHADOW, ORIGIN
  } type;
  const char *name;
};

#if SANITIZER_LINUX && defined(__mips64)

// MIPS64 maps:
// - 0x0000000000-0x0200000000: Program own segments
// - 0xa200000000-0xc000000000: PIE program segments
// - 0xe200000000-0xffffffffff: libraries segments.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "app-1"},
    {0x000200000000ULL, 0x002200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x002200000000ULL, 0x004000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x004000000000ULL, 0x004200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x004200000000ULL, 0x006000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x006000000000ULL, 0x006200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x006200000000ULL, 0x008000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x008000000000ULL, 0x008200000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x008200000000ULL, 0x00a000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x00a000000000ULL, 0x00a200000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x00a200000000ULL, 0x00c000000000ULL, MappingDesc::APP, "app-2"},
    {0x00c000000000ULL, 0x00e200000000ULL, MappingDesc::INVALID, "invalid"},
    {0x00e200000000ULL, 0x00ffffffffffULL, MappingDesc::APP, "app-3"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x8000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x2000000000ULL)
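
// Worked example (illustrative sketch, not part of the upstream header): with
// the macros above, an arbitrary "app-1" address lands in "shadow-1", and its
// origin cell lands in "origin-1".
static_assert(MEM_TO_SHADOW(0x000012345000ULL) == 0x008012345000ULL,
              "app-1 address should map into shadow-1");
static_assert(SHADOW_TO_ORIGIN(MEM_TO_SHADOW(0x000012345000ULL)) ==
                  0x00a012345000ULL,
              "app-1 address should map into origin-1");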

#elif SANITIZER_LINUX && defined(__aarch64__)

// The mapping describes the 39-bit, 42-bit, and 48-bit VMA layouts. AArch64
// maps:
// - 0x0000000000000-0x0000010000000: 39/42/48-bits program own segments
// - 0x0005500000000-0x0005600000000: 39-bits PIE program segments
// - 0x0007f80000000-0x0007fffffffff: 39-bits libraries segments
// - 0x002aa00000000-0x002ab00000000: 42-bits PIE program segments
// - 0x003ff00000000-0x003ffffffffff: 42-bits libraries segments
// - 0x0aaaaa0000000-0x0aaab00000000: 48-bits PIE program segments
// - 0xffff000000000-0x1000000000000: 48-bits libraries segments
// The layout is fragmented into multiple segments to increase the memory
// available on the 42-bit VMA (12.21% of the total VMA is usable there,
// vs. 13.28% for 39 bits). The 48-bit segments only cover the usual PIE and
// default segments plus a few more (262144 GB total, 0.39% of the total VMA).
const MappingDesc kMemoryLayout[] = {
    {0x00000000000ULL, 0x01000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x01000000000ULL, 0x02000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x02000000000ULL, 0x03000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x03000000000ULL, 0x04000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x04000000000ULL, 0x05000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x05000000000ULL, 0x06000000000ULL, MappingDesc::APP, "app-1"},
    {0x06000000000ULL, 0x07000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x07000000000ULL, 0x08000000000ULL, MappingDesc::APP, "app-2"},
    {0x08000000000ULL, 0x09000000000ULL, MappingDesc::INVALID, "invalid"},
    // The mappings below are used only for 42-bits VMA.
    {0x09000000000ULL, 0x0A000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x0A000000000ULL, 0x0B000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x0B000000000ULL, 0x0F000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0F000000000ULL, 0x10000000000ULL, MappingDesc::APP, "app-3"},
    {0x10000000000ULL, 0x11000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x11000000000ULL, 0x12000000000ULL, MappingDesc::APP, "app-4"},
    {0x12000000000ULL, 0x17000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x17000000000ULL, 0x18000000000ULL, MappingDesc::SHADOW, "shadow-4"},
    {0x18000000000ULL, 0x19000000000ULL, MappingDesc::ORIGIN, "origin-4"},
    {0x19000000000ULL, 0x20000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x20000000000ULL, 0x21000000000ULL, MappingDesc::APP, "app-5"},
    {0x21000000000ULL, 0x26000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x26000000000ULL, 0x27000000000ULL, MappingDesc::SHADOW, "shadow-5"},
    {0x27000000000ULL, 0x28000000000ULL, MappingDesc::ORIGIN, "origin-5"},
    {0x28000000000ULL, 0x29000000000ULL, MappingDesc::SHADOW, "shadow-7"},
    {0x29000000000ULL, 0x2A000000000ULL, MappingDesc::ORIGIN, "origin-7"},
    {0x2A000000000ULL, 0x2B000000000ULL, MappingDesc::APP, "app-6"},
    {0x2B000000000ULL, 0x2C000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x2C000000000ULL, 0x2D000000000ULL, MappingDesc::SHADOW, "shadow-6"},
    {0x2D000000000ULL, 0x2E000000000ULL, MappingDesc::ORIGIN, "origin-6"},
    {0x2E000000000ULL, 0x2F000000000ULL, MappingDesc::APP, "app-7"},
    {0x2F000000000ULL, 0x39000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x39000000000ULL, 0x3A000000000ULL, MappingDesc::SHADOW, "shadow-9"},
    {0x3A000000000ULL, 0x3B000000000ULL, MappingDesc::ORIGIN, "origin-9"},
    {0x3B000000000ULL, 0x3C000000000ULL, MappingDesc::APP, "app-8"},
    {0x3C000000000ULL, 0x3D000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x3D000000000ULL, 0x3E000000000ULL, MappingDesc::SHADOW, "shadow-8"},
    {0x3E000000000ULL, 0x3F000000000ULL, MappingDesc::ORIGIN, "origin-8"},
    {0x3F000000000ULL, 0x40000000000ULL, MappingDesc::APP, "app-9"},
    // The mappings below are used only for 48-bits VMA.
    // TODO(unknown): the 48-bit mapping only covers the usual PIE and non-PIE
    // segments plus some more segments totaling 262144 GB of VMA (which covers
    // only 0.32% of all 48-bit VMA). Memory availability can be increased by
    // adding multiple application segments as in the 39- and 42-bit mappings.
    {0x0040000000000ULL, 0x0041000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0041000000000ULL, 0x0042000000000ULL, MappingDesc::APP, "app-10"},
    {0x0042000000000ULL, 0x0047000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0047000000000ULL, 0x0048000000000ULL, MappingDesc::SHADOW, "shadow-10"},
    {0x0048000000000ULL, 0x0049000000000ULL, MappingDesc::ORIGIN, "origin-10"},
    {0x0049000000000ULL, 0x0050000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0050000000000ULL, 0x0051000000000ULL, MappingDesc::APP, "app-11"},
    {0x0051000000000ULL, 0x0056000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0056000000000ULL, 0x0057000000000ULL, MappingDesc::SHADOW, "shadow-11"},
    {0x0057000000000ULL, 0x0058000000000ULL, MappingDesc::ORIGIN, "origin-11"},
    {0x0058000000000ULL, 0x0059000000000ULL, MappingDesc::APP, "app-12"},
    {0x0059000000000ULL, 0x005E000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x005E000000000ULL, 0x005F000000000ULL, MappingDesc::SHADOW, "shadow-12"},
    {0x005F000000000ULL, 0x0060000000000ULL, MappingDesc::ORIGIN, "origin-12"},
    {0x0060000000000ULL, 0x0061000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0061000000000ULL, 0x0062000000000ULL, MappingDesc::APP, "app-13"},
    {0x0062000000000ULL, 0x0067000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0067000000000ULL, 0x0068000000000ULL, MappingDesc::SHADOW, "shadow-13"},
    {0x0068000000000ULL, 0x0069000000000ULL, MappingDesc::ORIGIN, "origin-13"},
    {0x0069000000000ULL, 0x0AAAAA0000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0AAAAA0000000ULL, 0x0AAAB00000000ULL, MappingDesc::APP, "app-14"},
    {0x0AAAB00000000ULL, 0x0AACAA0000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0AACAA0000000ULL, 0x0AACB00000000ULL, MappingDesc::SHADOW, "shadow-14"},
    {0x0AACB00000000ULL, 0x0AADAA0000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0AADAA0000000ULL, 0x0AADB00000000ULL, MappingDesc::ORIGIN, "origin-14"},
    {0x0AADB00000000ULL, 0x0FF9F00000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0FF9F00000000ULL, 0x0FFA000000000ULL, MappingDesc::SHADOW, "shadow-15"},
    {0x0FFA000000000ULL, 0x0FFAF00000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0FFAF00000000ULL, 0x0FFB000000000ULL, MappingDesc::ORIGIN, "origin-15"},
    {0x0FFB000000000ULL, 0x0FFFF00000000ULL, MappingDesc::INVALID, "invalid"},
    {0x0FFFF00000000ULL, 0x1000000000000ULL, MappingDesc::APP, "app-15"},
};
# define MEM_TO_SHADOW(mem) ((uptr)mem ^ 0x6000000000ULL)
# define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x1000000000ULL)
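
// Worked example (illustrative sketch, not part of the upstream header): an
// arbitrary "app-1" address of the 39-bit layout maps into "shadow-1" and
// "origin-1" under the macros above.
static_assert(MEM_TO_SHADOW(0x05123456780ULL) == 0x03123456780ULL,
              "app-1 address should map into shadow-1");
static_assert(SHADOW_TO_ORIGIN(MEM_TO_SHADOW(0x05123456780ULL)) ==
                  0x04123456780ULL,
              "app-1 address should map into origin-1");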

#elif SANITIZER_LINUX && SANITIZER_PPC64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x000200000000ULL, MappingDesc::APP, "low memory"},
    {0x000200000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180200000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180200000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0200000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0200000000ULL, 0x300000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x300000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Various kernels use different low end ranges but we can combine them into one
// big range. They also use different high end ranges but we can map them all to
// one range.
// Maps low and high app ranges to contiguous space with zero base:
// Low:  0000 0000 0000 - 0001 ffff ffff -> 1000 0000 0000 - 1001 ffff ffff
// High: 3000 0000 0000 - 3fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
// High: 4000 0000 0000 - 4fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
// High: 7000 0000 0000 - 7fff ffff ffff -> 0000 0000 0000 - 0fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xE00000000000ULL) ^ 0x100000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)
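
// Worked example (illustrative sketch, not part of the upstream header): both
// an arbitrary low-memory address and an arbitrary high-memory address
// linearize into the single shadow range declared above.
static_assert(MEM_TO_SHADOW(0x000123456000ULL) == 0x180123456000ULL,
              "low app address should map into shadow");
static_assert(MEM_TO_SHADOW(0x7fff00000000ULL) == 0x17ff00000000ULL,
              "high app address should map into shadow");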

#elif SANITIZER_LINUX && SANITIZER_S390_64
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x040000000000ULL, MappingDesc::APP, "low memory"},
    {0x040000000000ULL, 0x080000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x080000000000ULL, 0x180000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x180000000000ULL, 0x1C0000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x1C0000000000ULL, 0x2C0000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x2C0000000000ULL, 0x440000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x440000000000ULL, 0x500000000000ULL, MappingDesc::APP, "high memory"}};

#define MEM_TO_SHADOW(mem) \
  ((((uptr)(mem)) & ~0xC00000000000ULL) + 0x080000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x140000000000ULL)

#elif SANITIZER_FREEBSD && SANITIZER_WORDSIZE == 64

// Low memory: main binary, MAP_32BIT mappings and modules
// High memory: heap, modules and main thread stack
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "low memory"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x100000000000ULL, 0x310000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x310000000000ULL, 0x380000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x380000000000ULL, 0x590000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x590000000000ULL, 0x600000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};

// Maps low and high app ranges to contiguous space with zero base:
// Low:  0000 0000 0000 - 00ff ffff ffff -> 2000 0000 0000 - 20ff ffff ffff
// High: 6000 0000 0000 - 7fff ffff ffff -> 0000 0000 0000 - 1fff ffff ffff
#define LINEARIZE_MEM(mem) \
  (((uptr)(mem) & ~0xc00000000000ULL) ^ 0x200000000000ULL)
#define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x100000000000ULL)
#define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x280000000000)
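
// Worked example (illustrative sketch, not part of the upstream header): an
// arbitrary low-memory address and an arbitrary high-memory address both
// linearize into the single shadow range declared above.
static_assert(MEM_TO_SHADOW(0x000123456000ULL) == 0x300123456000ULL,
              "low app address should map into shadow");
static_assert(MEM_TO_SHADOW(0x7ff123456000ULL) == 0x2ff123456000ULL,
              "high app address should map into shadow");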

#elif SANITIZER_NETBSD || (SANITIZER_LINUX && SANITIZER_WORDSIZE == 64)

#ifdef MSAN_LINUX_X86_64_OLD_MAPPING
// Requires PIE binary and ASLR enabled.
// Main thread stack and DSOs at 0x7f0000000000 (sometimes 0x7e0000000000).
// Heap at 0x600000000000.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x200000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x200000000000ULL, 0x400000000000ULL, MappingDesc::SHADOW, "shadow"},
    {0x400000000000ULL, 0x600000000000ULL, MappingDesc::ORIGIN, "origin"},
    {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app"}};

#define MEM_TO_SHADOW(mem) (((uptr)(mem)) & ~0x400000000000ULL)
#define SHADOW_TO_ORIGIN(mem) (((uptr)(mem)) + 0x200000000000ULL)
#else  // MSAN_LINUX_X86_64_OLD_MAPPING
// All of the following configurations are supported.
// ASLR disabled: main executable and DSOs at 0x555550000000
// PIE and ASLR: main executable and DSOs at 0x7f0000000000
// non-PIE: main executable below 0x100000000, DSOs at 0x7f0000000000
// Heap at 0x700000000000.
const MappingDesc kMemoryLayout[] = {
    {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app-1"},
    {0x010000000000ULL, 0x100000000000ULL, MappingDesc::SHADOW, "shadow-2"},
    {0x100000000000ULL, 0x110000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x110000000000ULL, 0x200000000000ULL, MappingDesc::ORIGIN, "origin-2"},
    {0x200000000000ULL, 0x300000000000ULL, MappingDesc::SHADOW, "shadow-3"},
    {0x300000000000ULL, 0x400000000000ULL, MappingDesc::ORIGIN, "origin-3"},
    {0x400000000000ULL, 0x500000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x500000000000ULL, 0x510000000000ULL, MappingDesc::SHADOW, "shadow-1"},
    {0x510000000000ULL, 0x600000000000ULL, MappingDesc::APP, "app-2"},
    {0x600000000000ULL, 0x610000000000ULL, MappingDesc::ORIGIN, "origin-1"},
    {0x610000000000ULL, 0x700000000000ULL, MappingDesc::INVALID, "invalid"},
    {0x700000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app-3"}};
#define MEM_TO_SHADOW(mem) (((uptr)(mem)) ^ 0x500000000000ULL)
#define SHADOW_TO_ORIGIN(mem) (((uptr)(mem)) + 0x100000000000ULL)
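
// Worked example (illustrative sketch, not part of the upstream header): an
// arbitrary heap ("app-3") address maps into "shadow-3" and "origin-3" under
// the macros above.
static_assert(MEM_TO_SHADOW(0x700123456000ULL) == 0x200123456000ULL,
              "app-3 address should map into shadow-3");
static_assert(SHADOW_TO_ORIGIN(MEM_TO_SHADOW(0x700123456000ULL)) ==
                  0x300123456000ULL,
              "app-3 address should map into origin-3");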
#endif  // MSAN_LINUX_X86_64_OLD_MAPPING

#else
#error "Unsupported platform"
#endif

const uptr kMemoryLayoutSize = sizeof(kMemoryLayout) / sizeof(kMemoryLayout[0]);

#define MEM_TO_ORIGIN(mem) (SHADOW_TO_ORIGIN(MEM_TO_SHADOW((mem))))

#ifndef __clang__
__attribute__((optimize("unroll-loops")))
#endif
inline bool addr_is_type(uptr addr, MappingDesc::Type mapping_type) {
// It is critical for performance that this loop is unrolled (because then it is
// simplified into just a few constant comparisons).
#ifdef __clang__
#pragma unroll
#endif
  for (unsigned i = 0; i < kMemoryLayoutSize; ++i)
    if (kMemoryLayout[i].type == mapping_type &&
        addr >= kMemoryLayout[i].start && addr < kMemoryLayout[i].end)
      return true;
  return false;
}

#define MEM_IS_APP(mem) addr_is_type((uptr)(mem), MappingDesc::APP)
#define MEM_IS_SHADOW(mem) addr_is_type((uptr)(mem), MappingDesc::SHADOW)
#define MEM_IS_ORIGIN(mem) addr_is_type((uptr)(mem), MappingDesc::ORIGIN)
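
// Illustrative usage (hedged sketch; the real call sites live in the MSan
// runtime sources, not in this header): code that touches shadow or origin
// memory typically checks the address type first, e.g.:
//
//   if (MEM_IS_APP(addr)) {
//     u8 *shadow = (u8 *)MEM_TO_SHADOW(addr);
//     u32 *origin = (u32 *)MEM_TO_ORIGIN(addr & ~3UL);  // origins are 4-byte cells
//     // ... read or update shadow/origin ...
//   }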

// These constants must be kept in sync with the ones in MemorySanitizer.cpp.
const int kMsanParamTlsSize = 800;
const int kMsanRetvalTlsSize = 800;

namespace __msan {
extern int msan_inited;
extern bool msan_init_is_running;
extern int msan_report_count;

bool ProtectRange(uptr beg, uptr end);
bool InitShadow(bool init_origins);
char *GetProcSelfMaps();
void InitializeInterceptors();

void MsanAllocatorInit();
void MsanAllocatorThreadFinish();
void MsanDeallocate(StackTrace *stack, void *ptr);

void *msan_malloc(uptr size, StackTrace *stack);
void *msan_calloc(uptr nmemb, uptr size, StackTrace *stack);
void *msan_realloc(void *ptr, uptr size, StackTrace *stack);
void *msan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack);
void *msan_valloc(uptr size, StackTrace *stack);
void *msan_pvalloc(uptr size, StackTrace *stack);
void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack);
void *msan_memalign(uptr alignment, uptr size, StackTrace *stack);
int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        StackTrace *stack);

void InstallTrapHandler();
void InstallAtExitHandler();

const char *GetStackOriginDescr(u32 id, uptr *pc);

void EnterSymbolizer();
void ExitSymbolizer();
bool IsInSymbolizer();

struct SymbolizerScope {
  SymbolizerScope() { EnterSymbolizer(); }
  ~SymbolizerScope() { ExitSymbolizer(); }
};

void PrintWarning(uptr pc, uptr bp);
void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);

// Unpoison first n function arguments.
void UnpoisonParam(uptr n);
void UnpoisonThreadLocalState();

// Returns a "chained" origin id, pointing to the given stack trace followed by
// the previous origin id.
u32 ChainOrigin(u32 id, StackTrace *stack);
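
// Illustrative flow (hedged sketch, not part of this header): with
// -fsanitize-memory-track-origins=2, an instrumented store collects a stack
// trace and chains it onto the origin already attached to the value, roughly:
//
//   u32 prev_id = /* origin currently attached to the value */;
//   u32 new_id = ChainOrigin(prev_id, &stack);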

const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;

#define GET_MALLOC_STACK_TRACE \
  BufferedStackTrace stack; \
  if (__msan_get_track_origins() && msan_inited) \
    stack.Unwind(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(), \
                 nullptr, common_flags()->fast_unwind_on_malloc, \
                 common_flags()->malloc_context_size)
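
// Illustrative usage (hedged sketch; the real interceptors live in
// msan_interceptors.cpp and are not spelled exactly like this):
//
//   INTERCEPTOR(void *, malloc, SIZE_T size) {
//     GET_MALLOC_STACK_TRACE;
//     return msan_malloc(size, &stack);
//   }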

// For platforms which support only the slow unwinder, we restrict the store
// context size to 1, basically only storing the current pc. We do this because
// the slow unwinder, which is based on libunwind, is not async signal safe and
// causes random freezes in forking applications as well as in signal handlers.
#define GET_STORE_STACK_TRACE_PC_BP(pc, bp) \
  BufferedStackTrace stack; \
  if (__msan_get_track_origins() > 1 && msan_inited) { \
    int size = flags()->store_context_size; \
    if (!SANITIZER_CAN_FAST_UNWIND) \
      size = Min(size, 1); \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_malloc, size); \
  }

#define GET_STORE_STACK_TRACE \
  GET_STORE_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

#define GET_FATAL_STACK_TRACE_PC_BP(pc, bp) \
  BufferedStackTrace stack; \
  if (msan_inited) { \
    stack.Unwind(pc, bp, nullptr, common_flags()->fast_unwind_on_fatal); \
  }

#define GET_FATAL_STACK_TRACE_HERE \
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())

#define PRINT_CURRENT_STACK_CHECK() \
  { \
    GET_FATAL_STACK_TRACE_HERE; \
    stack.Print(); \
  }

class ScopedThreadLocalStateBackup {
 public:
  ScopedThreadLocalStateBackup() { Backup(); }
  ~ScopedThreadLocalStateBackup() { Restore(); }
  void Backup();
  void Restore();
 private:
  u64 va_arg_overflow_size_tls;
};
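
// Illustrative usage (hedged sketch, not part of this header): wrap code that
// may clobber MSan's thread-local va_arg state so it is restored on scope exit:
//
//   {
//     ScopedThreadLocalStateBackup tls_backup;  // Backup() now, Restore() in dtor
//     // ... run code that may overwrite the va_arg TLS ...
//   }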

void MsanTSDInit(void (*destructor)(void *tsd));
void *MsanTSDGet();
void MsanTSDSet(void *tsd);
void MsanTSDDtor(void *tsd);

}  // namespace __msan

#define MSAN_MALLOC_HOOK(ptr, size) \
  do { \
    if (&__sanitizer_malloc_hook) { \
      UnpoisonParam(2); \
      __sanitizer_malloc_hook(ptr, size); \
    } \
    RunMallocHooks(ptr, size); \
  } while (false)
#define MSAN_FREE_HOOK(ptr) \
  do { \
    if (&__sanitizer_free_hook) { \
      UnpoisonParam(1); \
      __sanitizer_free_hook(ptr); \
    } \
    RunFreeHooks(ptr); \
  } while (false)
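
// Illustrative usage (hedged sketch; the real call sites are in the MSan
// allocator, not in this header):
//
//   void *p = AllocateFromInternalAllocator(size);  // hypothetical helper
//   MSAN_MALLOC_HOOK(p, size);  // notify user hooks, unpoisoning their args first
//   ...
//   MSAN_FREE_HOOK(p);
//   ReleaseToInternalAllocator(p);                  // hypothetical helper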

#endif  // MSAN_H