"""
LLDB Formatters for LLVM data types.

Load into LLDB with 'command script import /path/to/lldbDataFormatters.py'
"""
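
# A minimal usage sketch (the paths are placeholders, not specific to any
# checkout): either run the import once at the lldb prompt, or add it to
# ~/.lldbinit so the formatters load automatically.
#
#   (lldb) command script import /path/to/lldbDataFormatters.py
#
#   # ~/.lldbinit
#   command script import /path/to/lldbDataFormatters.py
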
import collections
import lldb
import json


def __lldb_init_module(debugger, internal_dict):
    debugger.HandleCommand("type category define -e llvm -l c++")
    debugger.HandleCommand(
        "type synthetic add -w llvm "
        f"-l {__name__}.SmallVectorSynthProvider "
        '-x "^llvm::SmallVectorImpl<.+>$"'
    )
    debugger.HandleCommand(
        "type summary add -w llvm "
        '-e -s "size=${svar%#}" '
        '-x "^llvm::SmallVectorImpl<.+>$"'
    )
    debugger.HandleCommand(
        "type synthetic add -w llvm "
        f"-l {__name__}.SmallVectorSynthProvider "
        '-x "^llvm::SmallVector<.+,.+>$"'
    )
    debugger.HandleCommand(
        "type summary add -w llvm "
        '-e -s "size=${svar%#}" '
        '-x "^llvm::SmallVector<.+,.+>$"'
    )
    debugger.HandleCommand(
        "type synthetic add -w llvm "
        f"-l {__name__}.ArrayRefSynthProvider "
        '-x "^llvm::ArrayRef<.+>$"'
    )
    debugger.HandleCommand(
        "type summary add -w llvm "
        '-e -s "size=${svar%#}" '
        '-x "^llvm::ArrayRef<.+>$"'
    )
    debugger.HandleCommand(
        "type synthetic add -w llvm "
        f"-l {__name__}.OptionalSynthProvider "
        '-x "^llvm::Optional<.+>$"'
    )
    debugger.HandleCommand(
        "type summary add -w llvm "
        f"-e -F {__name__}.OptionalSummaryProvider "
        '-x "^llvm::Optional<.+>$"'
    )
    debugger.HandleCommand(
        "type summary add -w llvm "
        f"-F {__name__}.SmallStringSummaryProvider "
        '-x "^llvm::SmallString<.+>$"'
    )
    debugger.HandleCommand(
        "type summary add -w llvm "
        f"-F {__name__}.StringRefSummaryProvider "
        "llvm::StringRef"
    )
    debugger.HandleCommand(
        "type summary add -w llvm "
        f"-F {__name__}.ConstStringSummaryProvider "
        "lldb_private::ConstString"
    )

    # The synthetic providers for PointerIntPair and PointerUnion are disabled
    # because of a few issues. One example is template arguments that are
    # non-pointer types that instead specialize PointerLikeTypeTraits.
    # debugger.HandleCommand(
    #     "type synthetic add -w llvm "
    #     f"-l {__name__}.PointerIntPairSynthProvider "
    #     '-x "^llvm::PointerIntPair<.+>$"'
    # )
    # debugger.HandleCommand(
    #     "type synthetic add -w llvm "
    #     f"-l {__name__}.PointerUnionSynthProvider "
    #     '-x "^llvm::PointerUnion<.+>$"'
    # )

    debugger.HandleCommand(
        "type summary add -w llvm "
        f"-e -F {__name__}.DenseMapSummary "
        '-x "^llvm::DenseMap<.+>$"'
    )
    debugger.HandleCommand(
        "type synthetic add -w llvm "
        f"-l {__name__}.DenseMapSynthetic "
        '-x "^llvm::DenseMap<.+>$"'
    )


# Pretty printer for llvm::SmallVector/llvm::SmallVectorImpl
class SmallVectorSynthProvider:
    def __init__(self, valobj, internal_dict):
        self.valobj = valobj
        self.update()  # initialize this provider

    def num_children(self):
        return self.size.GetValueAsUnsigned(0)

    def get_child_index(self, name):
        try:
            return int(name.lstrip("[").rstrip("]"))
        except:
            return -1

    def get_child_at_index(self, index):
        # Do bounds checking.
        if index < 0:
            return None
        if index >= self.num_children():
            return None

        offset = index * self.type_size
        return self.begin.CreateChildAtOffset(
            "[" + str(index) + "]", offset, self.data_type
        )

    def update(self):
        self.begin = self.valobj.GetChildMemberWithName("BeginX")
        self.size = self.valobj.GetChildMemberWithName("Size")
        the_type = self.valobj.GetType()
        # If this is a reference type we have to dereference it to get to the
        # template parameter.
        if the_type.IsReferenceType():
            the_type = the_type.GetDereferencedType()

        if the_type.IsPointerType():
            the_type = the_type.GetPointeeType()

        self.data_type = the_type.GetTemplateArgumentType(0)
        self.type_size = self.data_type.GetByteSize()
        assert self.type_size != 0


class ArrayRefSynthProvider:
    """Provider for llvm::ArrayRef"""

    def __init__(self, valobj, internal_dict):
        self.valobj = valobj
        self.update()  # initialize this provider

    def num_children(self):
        return self.length

    def get_child_index(self, name):
        try:
            return int(name.lstrip("[").rstrip("]"))
        except:
            return -1

    def get_child_at_index(self, index):
        if index < 0 or index >= self.num_children():
            return None
        offset = index * self.type_size
        return self.data.CreateChildAtOffset(
            "[" + str(index) + "]", offset, self.data_type
        )

    def update(self):
        self.data = self.valobj.GetChildMemberWithName("Data")
        length_obj = self.valobj.GetChildMemberWithName("Length")
        self.length = length_obj.GetValueAsUnsigned(0)
        self.data_type = self.data.GetType().GetPointeeType()
        self.type_size = self.data_type.GetByteSize()
        assert self.type_size != 0


def GetOptionalValue(valobj):
    storage = valobj.GetChildMemberWithName("Storage")
    if not storage:
        storage = valobj

    failure = 2
    hasVal = storage.GetChildMemberWithName("hasVal").GetValueAsUnsigned(failure)
    if hasVal == failure:
        return "<could not read llvm::Optional>"

    if hasVal == 0:
        return None

    underlying_type = storage.GetType().GetTemplateArgumentType(0)
    storage = storage.GetChildMemberWithName("value")
    return storage.Cast(underlying_type)


def OptionalSummaryProvider(valobj, internal_dict):
    val = GetOptionalValue(valobj)
    if val is None:
        return "None"
    if val.summary:
        return val.summary
    return ""


class OptionalSynthProvider:
    """Provides deref support to llvm::Optional<T>"""

    def __init__(self, valobj, internal_dict):
        self.valobj = valobj

    def num_children(self):
        return self.valobj.num_children

    def get_child_index(self, name):
        if name == "$$dereference$$":
            return self.valobj.num_children
        return self.valobj.GetIndexOfChildWithName(name)

    def get_child_at_index(self, index):
        if index < self.valobj.num_children:
            return self.valobj.GetChildAtIndex(index)
        return GetOptionalValue(self.valobj) or lldb.SBValue()
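
# Note: the extra "$$dereference$$" child exposed above is the name lldb looks
# for when dereferencing a synthetic value, so a command such as
# "frame variable *SomeOptional" can print the wrapped value directly. The
# command shown is an illustrative usage sketch, not part of this provider.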


def SmallStringSummaryProvider(valobj, internal_dict):
    num_elements = valobj.GetNumChildren()
    res = '"'
    for i in range(0, num_elements):
        c = valobj.GetChildAtIndex(i).GetValue()
        if c:
            res += c.strip("'")
    res += '"'
    return res


def StringRefSummaryProvider(valobj, internal_dict):
    if valobj.GetNumChildren() == 2:
        # StringRefs are also used to point at binary blobs in memory,
        # so filter out suspiciously long strings.
        max_length = 1024
        actual_length = valobj.GetChildAtIndex(1).GetValueAsUnsigned()
        truncate = actual_length > max_length
        length = min(max_length, actual_length)
        if length == 0:
            return '""'

        data = valobj.GetChildAtIndex(0).GetPointeeData(item_count=length)
        error = lldb.SBError()
        string = data.ReadRawData(error, 0, data.GetByteSize()).decode()
        if error.Fail():
            return "<error: %s>" % error.description

        # json.dumps conveniently escapes the string for us.
        string = json.dumps(string)
        if truncate:
            string += "..."
        return string
    return None


def ConstStringSummaryProvider(valobj, internal_dict):
    if valobj.GetNumChildren() == 1:
        return valobj.GetChildAtIndex(0).GetSummary()
    return ""


def get_expression_path(val):
    stream = lldb.SBStream()
    if not val.GetExpressionPath(stream):
        return None
    return stream.GetData()


class PointerIntPairSynthProvider:
    def __init__(self, valobj, internal_dict):
        self.valobj = valobj
        self.update()

    def num_children(self):
        return 2

    def get_child_index(self, name):
        if name == "Pointer":
            return 0
        if name == "Int":
            return 1
        return None

    def get_child_at_index(self, index):
        expr_path = get_expression_path(self.valobj)
        if index == 0:
            return self.valobj.CreateValueFromExpression(
                "Pointer", f"({self.pointer_ty.name}){expr_path}.getPointer()"
            )
        if index == 1:
            return self.valobj.CreateValueFromExpression(
                "Int", f"({self.int_ty.name}){expr_path}.getInt()"
            )
        return None

    def update(self):
        self.pointer_ty = self.valobj.GetType().GetTemplateArgumentType(0)
        self.int_ty = self.valobj.GetType().GetTemplateArgumentType(2)


def parse_template_parameters(typename):
    """
    LLDB doesn't support template parameter packs, so let's parse them manually.
    """
    result = []
    start = typename.find("<")
    end = typename.rfind(">")
    if start < 1 or end < 2 or end - start < 2:
        return result

    nesting_level = 0
    current_parameter_start = start + 1

    for i in range(start + 1, end + 1):
        c = typename[i]
        if c == "<":
            nesting_level += 1
        elif c == ">":
            nesting_level -= 1
        elif c == "," and nesting_level == 0:
            result.append(typename[current_parameter_start:i].strip())
            current_parameter_start = i + 1

    result.append(typename[current_parameter_start:i].strip())

    return result
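
# Illustrative sketch of what the parser above returns; the type name below is
# a made-up example, not taken from this file:
#
#   parse_template_parameters("llvm::PointerUnion<clang::Decl *, clang::Stmt *>")
#   # -> ["clang::Decl *", "clang::Stmt *"]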


class PointerUnionSynthProvider:
    def __init__(self, valobj, internal_dict):
        self.valobj = valobj
        self.update()

    def num_children(self):
        return 1

    def get_child_index(self, name):
        if name == "Ptr":
            return 0
        return None

    def get_child_at_index(self, index):
        if index != 0:
            return None
        ptr_type_name = self.template_args[self.active_type_tag]
        return self.valobj.CreateValueFromExpression(
            "Ptr", f"({ptr_type_name}){self.val_expr_path}.getPointer()"
        )

    def update(self):
        self.pointer_int_pair = self.valobj.GetChildMemberWithName("Val")
        self.val_expr_path = get_expression_path(
            self.valobj.GetChildMemberWithName("Val")
        )
        self.active_type_tag = self.valobj.CreateValueFromExpression(
            "", f"(int){self.val_expr_path}.getInt()"
        ).GetValueAsSigned()
        self.template_args = parse_template_parameters(self.valobj.GetType().name)


def DenseMapSummary(valobj: lldb.SBValue, _) -> str:
    raw_value = valobj.GetNonSyntheticValue()
    num_entries = raw_value.GetChildMemberWithName("NumEntries").unsigned
    num_tombstones = raw_value.GetChildMemberWithName("NumTombstones").unsigned

    summary = f"size={num_entries}"
    if num_tombstones == 1:
        # The heuristic to identify valid entries does not handle the case of a
        # single tombstone. The summary calls attention to this.
        summary = f"tombstones=1, {summary}"
    return summary


class DenseMapSynthetic:
    valobj: lldb.SBValue

    # The indexes into `Buckets` that contain valid map entries.
    child_buckets: list[int]

    def __init__(self, valobj: lldb.SBValue, _) -> None:
        self.valobj = valobj

    def num_children(self) -> int:
        return len(self.child_buckets)

    def get_child_at_index(self, child_index: int) -> lldb.SBValue:
        bucket_index = self.child_buckets[child_index]
        entry = self.valobj.GetValueForExpressionPath(f".Buckets[{bucket_index}]")

        # By default, DenseMap instances use DenseMapPair to hold key-value
        # entries. When the entry is a DenseMapPair, unwrap it to expose the
        # children as simple std::pair values.
        #
        # This entry type is customizable (a template parameter). For other
        # types, expose the entry type as is.
        if entry.type.name.startswith("llvm::detail::DenseMapPair<"):
            entry = entry.GetChildAtIndex(0)

        return entry.Clone(f"[{child_index}]")

    def update(self):
        self.child_buckets = []

        num_entries = self.valobj.GetChildMemberWithName("NumEntries").unsigned
        if num_entries == 0:
            return

        buckets = self.valobj.GetChildMemberWithName("Buckets")
        num_buckets = self.valobj.GetChildMemberWithName("NumBuckets").unsigned

        # Bucket entries contain one of the following:
        #   1. Valid key-value
        #   2. Empty key
        #   3. Tombstone key (a deleted entry)
        #
        # NumBuckets is always greater than NumEntries. The empty key, and
        # potentially the tombstone key, will occur multiple times. A key that
        # is repeated is either the empty key or the tombstone key.
        #
        # For each key, collect a list of buckets it appears in.
        key_buckets: dict[str, list[int]] = collections.defaultdict(list)
        for index in range(num_buckets):
            key = buckets.GetValueForExpressionPath(f"[{index}].first")
            key_buckets[str(key.data)].append(index)

        # Heuristic: This is not a multi-map, so any repeated (non-unique) keys
        # are either the empty key or the tombstone key. Populate child_buckets
        # with the indexes of entries containing unique keys.
        for indexes in key_buckets.values():
            if len(indexes) == 1:
                self.child_buckets.append(indexes[0])