def dump_memory(base_addr, data, num_per_line, outfile):
    data_len = len(data)
    hex_string = binascii.hexlify(data)
    addr = base_addr
    ascii_str = ""
    i = 0
    while i < data_len:
        outfile.write("0x%8.8x: " % (addr + i))
        bytes_left = data_len - i
        if bytes_left >= num_per_line:
            curr_data_len = num_per_line
        else:
            curr_data_len = bytes_left
        hex_start_idx = i * 2
        hex_end_idx = hex_start_idx + curr_data_len * 2
        curr_hex_str = hex_string[hex_start_idx:hex_end_idx]
        # 'curr_hex_str' now contains the hex byte string for the
        # current line with no spaces between bytes
        t = iter(curr_hex_str)
        # Print hex bytes separated by space
        outfile.write(" ".join(a + b for a, b in zip(t, t)))
        # Print two spaces
        outfile.write("  ")
        # Calculate ASCII string for bytes into 'ascii_str'
        ascii_str = ""
        for j in range(i, i + curr_data_len):
            ch = data[j]
            if ch in string.printable and ch not in string.whitespace:
                ascii_str += "%c" % (ch)
            else:
                ascii_str += "."
        # Print ASCII representation and newline
        outfile.write(ascii_str)
        i = i + curr_data_len
        outfile.write("\n")

def read_packet(f, verbose=False, trace_file=None):
    """Decode a JSON packet that starts with the content length and is
    followed by the JSON bytes from a file 'f'. Returns None on EOF.
    """
    line = f.readline().decode("utf-8")
    if len(line) == 0:
        return None  # EOF.

    # Watch for line that starts with the prefix
    prefix = "Content-Length: "
    if line.startswith(prefix):
        # Decode length of JSON bytes
        if verbose:
            print('content: "%s"' % (line))
        length = int(line[len(prefix) :])
        if verbose:
            print('length: "%u"' % (length))
        # Skip the empty line that separates the header from the JSON
        line = f.readline()
        if verbose:
            print('empty: "%s"' % (line))
        # Read the JSON bytes
        json_str = f.read(length)
        if verbose:
            print('json: "%s"' % (json_str))
        if trace_file:
            trace_file.write("from adaptor:\n%s\n" % (json_str))
        # Decode the JSON bytes into a python dictionary
        return json.loads(json_str)

    raise Exception("unexpected malformed message from lldb-dap: " + line)

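# A minimal sketch of the framing read_packet() expects, using an in-memory
# stream (the packet contents below are illustrative, not produced by lldb-dap):
#
#   import io
#   body = b'{"type": "event", "event": "initialized"}'
#   stream = io.BytesIO(b"Content-Length: %d\r\n\r\n%s" % (len(body), body))
#   read_packet(stream)  # -> {'type': 'event', 'event': 'initialized'}
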
def packet_type_is(packet, packet_type):
    return "type" in packet and packet["type"] == packet_type

def dump_dap_log(log_file):
    print("========= DEBUG ADAPTER PROTOCOL LOGS =========")
    if log_file is None:
        print("no log file available")
    else:
        with open(log_file, "r") as file:
            print(file.read())
    print("========= END =========")

def read_packet_thread(vs_comm, log_file):
    done = False
    try:
        while not done:
            packet = read_packet(vs_comm.recv, trace_file=vs_comm.trace_file)
            # `packet` will be `None` on EOF. We want to pass it down to
            # handle_recv_packet anyway so the main thread can handle unexpected
            # termination of lldb-dap and stop waiting for new packets.
            done = not vs_comm.handle_recv_packet(packet)
    finally:
        dump_dap_log(log_file)

class DebugCommunication(object):
    def __init__(self, recv, send, init_commands, log_file=None):
        self.trace_file = None
        self.send = send
        self.recv = recv
        self.recv_packets = []
        self.recv_condition = threading.Condition()
        self.recv_thread = threading.Thread(
            target=read_packet_thread, args=(self, log_file)
        )
        self.process_event_body = None
        self.exit_status = None
        self.initialize_body = None
        self.thread_stop_reasons = {}
        self.breakpoint_events = []
        self.progress_events = []
        self.reverse_requests = []
        self.sequence = 1
        self.threads = None
        self.recv_thread.start()
        self.output_condition = threading.Condition()
        self.output = {}
        self.configuration_done_sent = False
        self.frame_scopes = {}
        self.init_commands = init_commands
        self.disassembled_instructions = {}

    @classmethod
    def encode_content(cls, s):
        return ("Content-Length: %u\r\n\r\n%s" % (len(s), s)).encode("utf-8")

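    # For example (sketch), an 18-character JSON string is framed as:
    #   DebugCommunication.encode_content('{"command":"next"}')
    #   -> b'Content-Length: 18\r\n\r\n{"command":"next"}'
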
    @classmethod
    def validate_response(cls, command, response):
        if command["command"] != response["command"]:
            raise ValueError("command mismatch in response")
        if command["seq"] != response["request_seq"]:
            raise ValueError("seq mismatch in response")

    def get_modules(self):
        module_list = self.request_modules()["body"]["modules"]
        modules = {}
        for module in module_list:
            modules[module["name"]] = module
        return modules

    def get_output(self, category, timeout=0.0, clear=True):
        self.output_condition.acquire()
        output = None
        if category in self.output:
            output = self.output[category]
            if clear:
                del self.output[category]
        elif timeout != 0.0:
            self.output_condition.wait(timeout)
            if category in self.output:
                output = self.output[category]
                if clear:
                    del self.output[category]
        self.output_condition.release()
        return output

    def collect_output(self, category, duration, clear=True):
        end_time = time.time() + duration
        collected_output = ""
        while end_time > time.time():
            output = self.get_output(category, timeout=0.25, clear=clear)
            if output:
                collected_output += output
        return collected_output if collected_output else None

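    # Sketch: gather everything the debuggee prints to stdout over roughly one
    # second (category names come from DAP "output" events):
    #   stdout_text = self.collect_output("stdout", duration=1.0)
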
    def enqueue_recv_packet(self, packet):
        self.recv_condition.acquire()
        self.recv_packets.append(packet)
        self.recv_condition.notify()
        self.recv_condition.release()

    def handle_recv_packet(self, packet):
        """Called by the read thread that is waiting for all incoming packets
        to store the incoming packet in "self.recv_packets" in a thread safe
        way. This function will then signal the "self.recv_condition" to
        indicate a new packet is available. Returns True if the caller
        should keep calling this function for more packets.
        """
        # If EOF, notify the read thread by enqueuing a None.
        if not packet:
            self.enqueue_recv_packet(None)
            return False

        # Check the packet to see if it is an event packet
        keepGoing = True
        packet_type = packet["type"]
        if packet_type == "event":
            event = packet["event"]
            body = None
            if "body" in packet:
                body = packet["body"]
            # Handle the event packet and cache information from these packets
            # as they come in
            if event == "output":
                # Store any output we receive so clients can retrieve it later.
                category = body["category"]
                output = body["output"]
                self.output_condition.acquire()
                if category in self.output:
                    self.output[category] += output
                else:
                    self.output[category] = output
                self.output_condition.notify()
                self.output_condition.release()
                # no need to add 'output' event packets to our packets list
                return keepGoing
            elif event == "process":
                # When a new process is attached or launched, remember the
                # details that are available in the body of the event
                self.process_event_body = body
            elif event == "stopped":
                # Each thread that stops with a reason will send a
                # 'stopped' event. We need to remember the thread stop
                # reasons since the 'threads' command doesn't return
                # that information.
                self._process_stopped()
                tid = body["threadId"]
                self.thread_stop_reasons[tid] = body
            elif event == "breakpoint":
                # Breakpoint events come in when a breakpoint has locations
                # added or removed. Keep track of them so we can look for them
                # in tests.
                self.breakpoint_events.append(packet)
                # no need to add 'breakpoint' event packets to our packets list
                return keepGoing
            elif event.startswith("progress"):
                # Progress events come in as 'progressStart', 'progressUpdate',
                # and 'progressEnd' events. Keep these around in case test
                # cases want to verify them.
                self.progress_events.append(packet)
                # No need to add 'progress' event packets to our packets list.
                return keepGoing
        elif packet_type == "response":
            if packet["command"] == "disconnect":
                keepGoing = False
        self.enqueue_recv_packet(packet)
        return keepGoing

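    # A minimal sketch of the caching above, with an illustrative packet and a
    # DebugCommunication instance `dc`:
    #   dc.handle_recv_packet({"type": "event", "event": "output",
    #                          "body": {"category": "stdout", "output": "hi\n"}})
    #   dc.get_output("stdout")  # -> "hi\n"
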
    def send_packet(self, command_dict, set_sequence=True):
        """Take the "command_dict" python dictionary and encode it as a JSON
        string and send the contents as a packet to the VSCode debug
        adaptor."""
        # Set the sequence ID for this command automatically
        if set_sequence:
            command_dict["seq"] = self.sequence
            self.sequence += 1
        # Encode our command dictionary as a JSON string
        json_str = json.dumps(command_dict, separators=(",", ":"))
        if self.trace_file:
            self.trace_file.write("to adaptor:\n%s\n" % (json_str))
        length = len(json_str)
        if length > 0:
            # Send the encoded JSON packet and flush the 'send' file
            self.send.write(self.encode_content(json_str))
            self.send.flush()

    def recv_packet(self, filter_type=None, filter_event=None, timeout=None):
        """Get a JSON packet from the VSCode debug adaptor. This function
        assumes a thread that reads packets is running and will deliver
        any received packets by calling handle_recv_packet(...). This
        function will wait for the packet to arrive and return it when
        it does."""
        try:
            self.recv_condition.acquire()
            packet = None
            while True:
                for i, curr_packet in enumerate(self.recv_packets):
                    if not curr_packet:
                        raise EOFError
                    packet_type = curr_packet["type"]
                    if filter_type is None or packet_type in filter_type:
                        if filter_event is None or (
                            packet_type == "event"
                            and curr_packet["event"] in filter_event
                        ):
                            packet = self.recv_packets.pop(i)
                            break
                if packet:
                    return packet
                # Sleep until packet is received
                len_before = len(self.recv_packets)
                self.recv_condition.wait(timeout)
                len_after = len(self.recv_packets)
                if len_before == len_after:
                    return None  # Timed out
        except EOFError:
            return None
        finally:
            self.recv_condition.release()

    def send_recv(self, command):
        """Send a command python dictionary as JSON and receive the JSON
        response. Validates that the response is the correct sequence and
        command in the reply. Any events that are received are added to the
        events list in this object"""
        self.send_packet(command)
        done = False
        while not done:
            response_or_request = self.recv_packet(filter_type=["response", "request"])
            if response_or_request is None:
                desc = 'no response for "%s"' % (command["command"])
                raise ValueError(desc)
            if response_or_request["type"] == "response":
                self.validate_response(command, response_or_request)
                return response_or_request
            else:
                self.reverse_requests.append(response_or_request)
                if response_or_request["command"] == "runInTerminal":
                    subprocess.Popen(
                        response_or_request["arguments"]["args"],
                        env=response_or_request["arguments"]["env"],
                    )
                    self.send_packet(
                        {
                            "type": "response",
                            "seq": -1,
                            "request_seq": response_or_request["seq"],
                            "success": True,
                            "command": "runInTerminal",
                            "body": {},
                        },
                        set_sequence=False,
                    )
                elif response_or_request["command"] == "startDebugging":
                    self.send_packet(
                        {
                            "type": "response",
                            "seq": -1,
                            "request_seq": response_or_request["seq"],
                            "success": True,
                            "command": "startDebugging",
                            "body": {},
                        },
                        set_sequence=False,
                    )
                else:
                    desc = 'unknown reverse request "%s"' % (
                        response_or_request["command"]
                    )
                    raise ValueError(desc)

        return None

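    # Sketch of a raw round trip through send_recv() (assumes a running
    # adaptor; the "threads" command is just an example):
    #   response = self.send_recv(
    #       {"command": "threads", "type": "request", "arguments": {}})
    #   if response["success"]:
    #       print(response["body"]["threads"])
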
    def wait_for_event(self, filter=None, timeout=None):
        return self.recv_packet(
            filter_type="event", filter_event=filter, timeout=timeout
        )

    def wait_for_stopped(self, timeout=None):
        stopped_events = []
        stopped_event = self.wait_for_event(
            filter=["stopped", "exited"], timeout=timeout
        )
        while stopped_event:
            stopped_events.append(stopped_event)
            # If we exited, then we are done
            if stopped_event["event"] == "exited":
                self.exit_status = stopped_event["body"]["exitCode"]
                break
            # Otherwise we stopped and there might be one or more 'stopped'
            # events for each thread that stopped with a reason, so keep
            # checking for more 'stopped' events and return all of them
            stopped_event = self.wait_for_event(filter="stopped", timeout=0.25)
        return stopped_events

    def wait_for_exited(self):
        event_dict = self.wait_for_event("exited")
        if event_dict is None:
            raise ValueError("didn't get exited event")
        return event_dict

    def wait_for_terminated(self):
        event_dict = self.wait_for_event("terminated")
        if event_dict is None:
            raise ValueError("didn't get terminated event")
        return event_dict

    def get_initialize_value(self, key):
        """Get a value for the given key if there is a key/value pair in
        the "initialize" request response body.
        """
        if self.initialize_body and key in self.initialize_body:
            return self.initialize_body[key]
        return None

    def get_threads(self):
        if self.threads is None:
            self.request_threads()
        return self.threads

    def get_thread_id(self, threadIndex=0):
        """Utility function to get the first thread ID in the thread list.
        If the thread list is empty, then fetch the threads.
        """
        if self.threads is None:
            self.request_threads()
        if self.threads and threadIndex < len(self.threads):
            return self.threads[threadIndex]["id"]
        return None

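    # Sketch: after a stop, step the first thread reported by the adaptor:
    #   tid = self.get_thread_id()
    #   if tid is not None:
    #       self.request_next(tid)
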
    def get_stackFrame(self, frameIndex=0, threadId=None):
        """Get a single "StackFrame" object from a "stackTrace" request and
        return the "StackFrame" as a python dictionary, or None on failure
        """
        if threadId is None:
            threadId = self.get_thread_id()
        if threadId is None:
            print("invalid threadId")
            return None
        response = self.request_stackTrace(threadId, startFrame=frameIndex, levels=1)
        if response:
            return response["body"]["stackFrames"][0]
        print("invalid response")
        return None

    def get_completions(self, text, frameId=None):
        if frameId is None:
            stackFrame = self.get_stackFrame()
            frameId = stackFrame["id"]
        response = self.request_completions(text, frameId)
        return response["body"]["targets"]

    def get_scope_variables(self, scope_name, frameIndex=0, threadId=None):
        stackFrame = self.get_stackFrame(frameIndex=frameIndex, threadId=threadId)
        if stackFrame is None:
            return []
        frameId = stackFrame["id"]
        if frameId in self.frame_scopes:
            frame_scopes = self.frame_scopes[frameId]
        else:
            scopes_response = self.request_scopes(frameId)
            frame_scopes = scopes_response["body"]["scopes"]
            self.frame_scopes[frameId] = frame_scopes
        for scope in frame_scopes:
            if scope["name"] == scope_name:
                varRef = scope["variablesReference"]
                variables_response = self.request_variables(varRef)
                if variables_response:
                    if "body" in variables_response:
                        body = variables_response["body"]
                        if "variables" in body:
                            vars = body["variables"]
                            return vars
        return []

    def get_global_variables(self, frameIndex=0, threadId=None):
        return self.get_scope_variables(
            "Globals", frameIndex=frameIndex, threadId=threadId
        )

    def get_local_variables(self, frameIndex=0, threadId=None):
        return self.get_scope_variables(
            "Locals", frameIndex=frameIndex, threadId=threadId
        )

    def get_registers(self, frameIndex=0, threadId=None):
        return self.get_scope_variables(
            "Registers", frameIndex=frameIndex, threadId=threadId
        )

    def get_local_variable(self, name, frameIndex=0, threadId=None):
        locals = self.get_local_variables(frameIndex=frameIndex, threadId=threadId)
        for local in locals:
            if "name" in local and local["name"] == name:
                return local
        return None

    def get_local_variable_value(self, name, frameIndex=0, threadId=None):
        variable = self.get_local_variable(
            name, frameIndex=frameIndex, threadId=threadId
        )
        if variable and "value" in variable:
            return variable["value"]
        return None

    def replay_packets(self, replay_file_path):
        f = open(replay_file_path, "r")
        mode = "invalid"
        set_sequence = False
        command_dict = None
        while mode != "eof":
            if mode == "invalid":
                line = f.readline()
                if line.startswith("to adapter:"):
                    mode = "send"
                elif line.startswith("from adapter:"):
                    mode = "recv"
            elif mode == "send":
                command_dict = read_packet(f)
                # Skip the end of line that follows the JSON
                f.readline()
                if command_dict is None:
                    raise ValueError("decode packet failed from replay file")
                print("Sending:")
                pprint.PrettyPrinter(indent=2).pprint(command_dict)
                # raw_input('Press ENTER to send:')
                self.send_packet(command_dict, set_sequence)
                mode = "invalid"
            elif mode == "recv":
                print("Replay response:")
                replay_response = read_packet(f)
                # Skip the end of line that follows the JSON
                f.readline()
                pprint.PrettyPrinter(indent=2).pprint(replay_response)
                actual_response = self.recv_packet()
                if actual_response:
                    type = actual_response["type"]
                    print("Actual response:")
                    if type == "response":
                        self.validate_response(command_dict, actual_response)
                    pprint.PrettyPrinter(indent=2).pprint(actual_response)
                else:
                    print("error: didn't get a valid response")
                mode = "invalid"

    def request_attach(
        self,
        program=None,
        pid=None,
        waitFor=None,
        trace=None,
        initCommands=None,
        preRunCommands=None,
        stopCommands=None,
        exitCommands=None,
        attachCommands=None,
        coreFile=None,
        terminateCommands=None,
        postRunCommands=None,
        sourceMap=None,
    ):
        args_dict = {}
        if pid is not None:
            args_dict["pid"] = pid
        if program is not None:
            args_dict["program"] = program
        if waitFor is not None:
            args_dict["waitFor"] = waitFor
        if trace:
            args_dict["trace"] = trace
        args_dict["initCommands"] = self.init_commands
        if initCommands:
            args_dict["initCommands"].extend(initCommands)
        if preRunCommands:
            args_dict["preRunCommands"] = preRunCommands
        if stopCommands:
            args_dict["stopCommands"] = stopCommands
        if exitCommands:
            args_dict["exitCommands"] = exitCommands
        if terminateCommands:
            args_dict["terminateCommands"] = terminateCommands
        if attachCommands:
            args_dict["attachCommands"] = attachCommands
        if coreFile:
            args_dict["coreFile"] = coreFile
        if postRunCommands:
            args_dict["postRunCommands"] = postRunCommands
        if sourceMap:
            args_dict["sourceMap"] = sourceMap
        command_dict = {"command": "attach", "type": "request", "arguments": args_dict}
        return self.send_recv(command_dict)

    def request_configurationDone(self):
        command_dict = {
            "command": "configurationDone",
            "type": "request",
            "arguments": {},
        }
        response = self.send_recv(command_dict)
        if response:
            self.configuration_done_sent = True
        return response

    def _process_stopped(self):
        self.threads = None
        self.frame_scopes = {}

    def request_continue(self, threadId=None):
        if self.exit_status is not None:
            raise ValueError("request_continue called after process exited")
        # If we have launched or attached, then the first continue is done by
        # sending the 'configurationDone' request
        if not self.configuration_done_sent:
            return self.request_configurationDone()
        args_dict = {}
        if threadId is None:
            threadId = self.get_thread_id()
        args_dict["threadId"] = threadId
        command_dict = {
            "command": "continue",
            "type": "request",
            "arguments": args_dict,
        }
        response = self.send_recv(command_dict)
        # Caller must still call wait_for_stopped.
        return response

    def request_restart(self, restartArguments=None):
        command_dict = {
            "command": "restart",
            "type": "request",
        }
        if restartArguments:
            command_dict["arguments"] = restartArguments
        response = self.send_recv(command_dict)
        # Caller must still call wait_for_stopped.
        return response

    def request_disconnect(self, terminateDebuggee=None):
        args_dict = {}
        if terminateDebuggee is not None:
            if terminateDebuggee:
                args_dict["terminateDebuggee"] = True
            else:
                args_dict["terminateDebuggee"] = False
        command_dict = {
            "command": "disconnect",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_disassemble(
        self, memoryReference, offset=-50, instructionCount=200, resolveSymbols=True
    ):
        args_dict = {
            "memoryReference": memoryReference,
            "offset": offset,
            "instructionCount": instructionCount,
            "resolveSymbols": resolveSymbols,
        }
        command_dict = {
            "command": "disassemble",
            "type": "request",
            "arguments": args_dict,
        }
        instructions = self.send_recv(command_dict)["body"]["instructions"]
        for inst in instructions:
            self.disassembled_instructions[inst["address"]] = inst

    def request_evaluate(self, expression, frameIndex=0, threadId=None, context=None):
        stackFrame = self.get_stackFrame(frameIndex=frameIndex, threadId=threadId)
        if stackFrame is None:
            return []
        args_dict = {
            "expression": expression,
            "context": context,
            "frameId": stackFrame["id"],
        }
        command_dict = {
            "command": "evaluate",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_initialize(self, sourceInitFile):
        command_dict = {
            "command": "initialize",
            "type": "request",
            "arguments": {
                "adapterID": "lldb-native",
                "clientID": "vscode",
                "columnsStartAt1": True,
                "linesStartAt1": True,
                "pathFormat": "path",
                "supportsRunInTerminalRequest": True,
                "supportsVariablePaging": True,
                "supportsVariableType": True,
                "supportsStartDebuggingRequest": True,
                "sourceInitFile": sourceInitFile,
            },
        }
        response = self.send_recv(command_dict)
        if response:
            if "body" in response:
                self.initialize_body = response["body"]
        return response

    def request_launch(
        self,
        program,
        args=None,
        cwd=None,
        env=None,
        stopOnEntry=False,
        disableASLR=True,
        disableSTDIO=False,
        shellExpandArguments=False,
        trace=False,
        initCommands=None,
        preRunCommands=None,
        stopCommands=None,
        exitCommands=None,
        terminateCommands=None,
        sourcePath=None,
        debuggerRoot=None,
        launchCommands=None,
        sourceMap=None,
        runInTerminal=False,
        postRunCommands=None,
        enableAutoVariableSummaries=False,
        enableSyntheticChildDebugging=False,
        commandEscapePrefix="`",
    ):
        args_dict = {"program": program}
        if args:
            args_dict["args"] = args
        if cwd:
            args_dict["cwd"] = cwd
        if env:
            args_dict["env"] = env
        if stopOnEntry:
            args_dict["stopOnEntry"] = stopOnEntry
        if disableASLR:
            args_dict["disableASLR"] = disableASLR
        if disableSTDIO:
            args_dict["disableSTDIO"] = disableSTDIO
        if shellExpandArguments:
            args_dict["shellExpandArguments"] = shellExpandArguments
        if trace:
            args_dict["trace"] = trace
        args_dict["initCommands"] = self.init_commands
        if initCommands:
            args_dict["initCommands"].extend(initCommands)
        if preRunCommands:
            args_dict["preRunCommands"] = preRunCommands
        if stopCommands:
            args_dict["stopCommands"] = stopCommands
        if exitCommands:
            args_dict["exitCommands"] = exitCommands
        if terminateCommands:
            args_dict["terminateCommands"] = terminateCommands
        if sourcePath:
            args_dict["sourcePath"] = sourcePath
        if debuggerRoot:
            args_dict["debuggerRoot"] = debuggerRoot
        if launchCommands:
            args_dict["launchCommands"] = launchCommands
        if sourceMap:
            args_dict["sourceMap"] = sourceMap
        if runInTerminal:
            args_dict["runInTerminal"] = runInTerminal
        if postRunCommands:
            args_dict["postRunCommands"] = postRunCommands
        args_dict["enableAutoVariableSummaries"] = enableAutoVariableSummaries
        args_dict["enableSyntheticChildDebugging"] = enableSyntheticChildDebugging
        args_dict["commandEscapePrefix"] = commandEscapePrefix
        command_dict = {"command": "launch", "type": "request", "arguments": args_dict}
        response = self.send_recv(command_dict)

        if response["success"]:
            # Wait for a 'process' and 'initialized' event in any order
            self.wait_for_event(filter=["process", "initialized"])
            self.wait_for_event(filter=["process", "initialized"])
        return response

    def request_next(self, threadId):
        if self.exit_status is not None:
            raise ValueError("request_next called after process exited")
        args_dict = {"threadId": threadId}
        command_dict = {"command": "next", "type": "request", "arguments": args_dict}
        return self.send_recv(command_dict)

    def request_stepIn(self, threadId):
        if self.exit_status is not None:
            raise ValueError("request_stepIn called after process exited")
        args_dict = {"threadId": threadId}
        command_dict = {"command": "stepIn", "type": "request", "arguments": args_dict}
        return self.send_recv(command_dict)

    def request_stepOut(self, threadId):
        if self.exit_status is not None:
            raise ValueError("request_stepOut called after process exited")
        args_dict = {"threadId": threadId}
        command_dict = {"command": "stepOut", "type": "request", "arguments": args_dict}
        return self.send_recv(command_dict)

    def request_pause(self, threadId=None):
        if self.exit_status is not None:
            raise ValueError("request_pause called after process exited")
        if threadId is None:
            threadId = self.get_thread_id()
        args_dict = {"threadId": threadId}
        command_dict = {"command": "pause", "type": "request", "arguments": args_dict}
        return self.send_recv(command_dict)

    def request_scopes(self, frameId):
        args_dict = {"frameId": frameId}
        command_dict = {"command": "scopes", "type": "request", "arguments": args_dict}
        return self.send_recv(command_dict)

    def request_setBreakpoints(self, file_path, line_array, data=None):
        """data is an array of parameters for the breakpoints in line_array.
        Each parameter object is a 1:1 mapping with entries in line_array.
        It contains optional condition/hitCondition/logMessage parameters.
        """
        (dir, base) = os.path.split(file_path)
        source_dict = {"name": base, "path": file_path}
        args_dict = {
            "source": source_dict,
            "sourceModified": False,
        }
        if line_array is not None:
            args_dict["lines"] = "%s" % line_array
            breakpoints = []
            for i, line in enumerate(line_array):
                breakpoint_data = None
                if data is not None and i < len(data):
                    breakpoint_data = data[i]
                bp = {"line": line}
                if breakpoint_data is not None:
                    if "condition" in breakpoint_data and breakpoint_data["condition"]:
                        bp["condition"] = breakpoint_data["condition"]
                    if (
                        "hitCondition" in breakpoint_data
                        and breakpoint_data["hitCondition"]
                    ):
                        bp["hitCondition"] = breakpoint_data["hitCondition"]
                    if (
                        "logMessage" in breakpoint_data
                        and breakpoint_data["logMessage"]
                    ):
                        bp["logMessage"] = breakpoint_data["logMessage"]
                breakpoints.append(bp)
            args_dict["breakpoints"] = breakpoints

        command_dict = {
            "command": "setBreakpoints",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_setExceptionBreakpoints(self, filters):
        args_dict = {"filters": filters}
        command_dict = {
            "command": "setExceptionBreakpoints",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_setFunctionBreakpoints(self, names, condition=None, hitCondition=None):
        breakpoints = []
        for name in names:
            bp = {"name": name}
            if condition is not None:
                bp["condition"] = condition
            if hitCondition is not None:
                bp["hitCondition"] = hitCondition
            breakpoints.append(bp)
        args_dict = {"breakpoints": breakpoints}
        command_dict = {
            "command": "setFunctionBreakpoints",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_compileUnits(self, moduleId):
        args_dict = {"moduleId": moduleId}
        command_dict = {
            "command": "compileUnits",
            "type": "request",
            "arguments": args_dict,
        }
        response = self.send_recv(command_dict)
        return response

    def request_completions(self, text, frameId=None):
        args_dict = {"text": text, "column": len(text)}
        if frameId:
            args_dict["frameId"] = frameId
        command_dict = {
            "command": "completions",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_modules(self):
        return self.send_recv({"command": "modules", "type": "request"})

    def request_stackTrace(
        self, threadId=None, startFrame=None, levels=None, dump=False
    ):
        if threadId is None:
            threadId = self.get_thread_id()
        args_dict = {"threadId": threadId}
        if startFrame is not None:
            args_dict["startFrame"] = startFrame
        if levels is not None:
            args_dict["levels"] = levels
        command_dict = {
            "command": "stackTrace",
            "type": "request",
            "arguments": args_dict,
        }
        response = self.send_recv(command_dict)
        if dump:
            for idx, frame in enumerate(response["body"]["stackFrames"]):
                name = frame["name"]
                if "line" in frame and "source" in frame:
                    source = frame["source"]
                    if "sourceReference" not in source:
                        if "name" in source:
                            source_name = source["name"]
                            line = frame["line"]
                            print("[%3u] %s @ %s:%u" % (idx, name, source_name, line))
                            continue
                print("[%3u] %s" % (idx, name))
        return response

    def request_threads(self):
        """Request a list of all threads and combine any information from any
        "stopped" events since those contain more information about why a
        thread actually stopped. Returns an array of thread dictionaries
        with information about all threads"""
        command_dict = {"command": "threads", "type": "request", "arguments": {}}
        response = self.send_recv(command_dict)
        body = response["body"]
        # Fill in "self.threads" correctly so that clients that call
        # self.get_threads() or self.get_thread_id(...) can get information
        # on threads when the process is stopped.
        if "threads" in body:
            self.threads = body["threads"]
            for thread in self.threads:
                # Copy the thread dictionary so we can add key/value pairs to
                # it without affecting the original info from the "threads"
                # command.
                tid = thread["id"]
                if tid in self.thread_stop_reasons:
                    thread_stop_info = self.thread_stop_reasons[tid]
                    copy_keys = ["reason", "description", "text"]
                    for key in copy_keys:
                        if key in thread_stop_info:
                            thread[key] = thread_stop_info[key]
        else:
            self.threads = None
        return response

    def request_variables(self, variablesReference, start=None, count=None):
        args_dict = {"variablesReference": variablesReference}
        if start is not None:
            args_dict["start"] = start
        if count is not None:
            args_dict["count"] = count
        command_dict = {
            "command": "variables",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_setVariable(self, containingVarRef, name, value, id=None):
        args_dict = {
            "variablesReference": containingVarRef,
            "name": name,
            "value": str(value),
        }
        if id is not None:
            args_dict["id"] = id
        command_dict = {
            "command": "setVariable",
            "type": "request",
            "arguments": args_dict,
        }
        return self.send_recv(command_dict)

    def request_testGetTargetBreakpoints(self):
        """A request packet used in the LLDB test suite to get all currently
        set breakpoint infos for all breakpoints currently set in the
        target.
        """
        command_dict = {
            "command": "_testGetTargetBreakpoints",
            "type": "request",
            "arguments": {},
        }
        return self.send_recv(command_dict)

    def terminate(self):
        self.send.close()

class DebugAdaptorServer(DebugCommunication):
    def __init__(
        self, executable=None, port=None, init_commands=[], log_file=None, env=None
    ):
        self.process = None
        if executable is not None:
            adaptor_env = os.environ.copy()
            if env is not None:
                adaptor_env.update(env)
            if log_file:
                adaptor_env["LLDBDAP_LOG"] = log_file
            self.process = subprocess.Popen(
                [executable],
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                env=adaptor_env,
            )
            DebugCommunication.__init__(
                self, self.process.stdout, self.process.stdin, init_commands, log_file
            )
        elif port is not None:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect(("127.0.0.1", port))
            DebugCommunication.__init__(
                self, s.makefile("r"), s.makefile("w"), init_commands
            )

    def get_pid(self):
        if self.process:
            return self.process.pid
        return -1

    def terminate(self):
        super(DebugAdaptorServer, self).terminate()
        if self.process is not None:
            self.process.terminate()
            self.process.wait()
            self.process = None

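    # Typical construction (sketch; the executable path is a placeholder):
    #   dbg = DebugAdaptorServer(executable="/path/to/lldb-dap", init_commands=[])
    #   dbg.request_initialize(sourceInitFile=False)
    #   # ... launch/attach, set breakpoints, step ...
    #   dbg.request_disconnect(terminateDebuggee=True)
    #   dbg.terminate()
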
def attach_options_specified(options):
    if options.pid is not None:
        return True
    if options.waitFor:
        return True
    if options.attach:
        return True
    if options.attachCmds:
        return True
    return False

def run_vscode(dbg, args, options):
    dbg.request_initialize(options.sourceInitFile)
    if attach_options_specified(options):
        response = dbg.request_attach(
            program=options.program,
            pid=options.pid,
            waitFor=options.waitFor,
            attachCommands=options.attachCmds,
            initCommands=options.initCmds,
            preRunCommands=options.preRunCmds,
            stopCommands=options.stopCmds,
            exitCommands=options.exitCmds,
            terminateCommands=options.terminateCmds,
        )
    else:
        response = dbg.request_launch(
            options.program,
            args=args,
            env=options.envs,
            cwd=options.workingDir,
            debuggerRoot=options.debuggerRoot,
            sourcePath=options.sourcePath,
            initCommands=options.initCmds,
            preRunCommands=options.preRunCmds,
            stopCommands=options.stopCmds,
            exitCommands=options.exitCmds,
            terminateCommands=options.terminateCmds,
        )

    if response["success"]:
        if options.sourceBreakpoints:
            source_to_lines = {}
            for file_line in options.sourceBreakpoints:
                (path, line) = file_line.split(":")
                if len(path) == 0 or len(line) == 0:
                    print('error: invalid source with line "%s"' % (file_line))
                else:
                    if path in source_to_lines:
                        source_to_lines[path].append(int(line))
                    else:
                        source_to_lines[path] = [int(line)]
            for source in source_to_lines:
                dbg.request_setBreakpoints(source, source_to_lines[source])
        if options.funcBreakpoints:
            dbg.request_setFunctionBreakpoints(options.funcBreakpoints)
        dbg.request_configurationDone()
        dbg.wait_for_stopped()
    else:
        if "message" in response:
            print(response["message"])
    dbg.request_disconnect(terminateDebuggee=True)

def main():
    parser = optparse.OptionParser(
        description="A testing framework for the Visual Studio Code Debug "
        "Adaptor protocol"
    )
    parser.add_option(
        "--vscode", type="string", dest="vscode_path", default=None,
        help="The path to the command line program that implements the "
        "Visual Studio Code Debug Adaptor protocol.",
    )
    parser.add_option(
        "--program", type="string", dest="program", default=None,
        help="The path to the program to debug.",
    )
    parser.add_option(
        "--workingDir", type="string", dest="workingDir", default=None,
        help="Set the working directory for the process we launch.",
    )
    parser.add_option(
        "--sourcePath", type="string", dest="sourcePath", default=None,
        help="Set the relative source root for any debug info that has "
        "relative paths in it.",
    )
    parser.add_option(
        "--debuggerRoot", type="string", dest="debuggerRoot", default=None,
        help="Set the working directory for lldb-dap for any object files "
        "with relative paths in the Mach-o debug map.",
    )
    parser.add_option(
        "-r", "--replay", type="string", dest="replay", default=None,
        help="Specify a file containing a packet log to replay with the "
        "current Visual Studio Code Debug Adaptor executable.",
    )
    parser.add_option(
        "-g", "--debug", action="store_true", dest="debug", default=False,
        help="Pause waiting for a debugger to attach to the debug adaptor",
    )
    parser.add_option(
        "--sourceInitFile", action="store_true", dest="sourceInitFile", default=False,
        help="Whether lldb-dap should source .lldbinit file or not",
    )
    parser.add_option(
        "--port", type="int", dest="port", default=None,
        help="Attach a socket to a port instead of using STDIN for VSCode",
    )
    parser.add_option(
        "--pid", type="int", dest="pid", default=None,
        help="The process ID to attach to",
    )
    parser.add_option(
        "--attach", action="store_true", dest="attach", default=False,
        help="Specify this option to attach to a process by name. The "
        "process name is the basename of the executable specified with "
        "the --program option.",
    )
    parser.add_option(
        "-f", "--function-bp", type="string", action="append",
        dest="funcBreakpoints", default=[],
        help="Specify the name of a function to break at. "
        "Can be specified more than once.",
    )
    parser.add_option(
        "-s", "--source-bp", type="string", action="append",
        dest="sourceBreakpoints", default=[],
        help="Specify source breakpoints to set in the format of "
        "<source>:<line>. Can be specified more than once.",
    )
    parser.add_option(
        "--attachCommand", type="string", action="append", dest="attachCmds",
        default=[],
        help="Specify a LLDB command that will attach to a process. "
        "Can be specified more than once.",
    )
    parser.add_option(
        "--initCommand", type="string", action="append", dest="initCmds",
        default=[],
        help="Specify a LLDB command that will be executed before the target "
        "is created. Can be specified more than once.",
    )
    parser.add_option(
        "--preRunCommand", type="string", action="append", dest="preRunCmds",
        default=[],
        help="Specify a LLDB command that will be executed after the target "
        "has been created. Can be specified more than once.",
    )
    parser.add_option(
        "--stopCommand", type="string", action="append", dest="stopCmds",
        default=[],
        help="Specify a LLDB command that will be executed each time the "
        "process stops. Can be specified more than once.",
    )
    parser.add_option(
        "--exitCommand", type="string", action="append", dest="exitCmds",
        default=[],
        help="Specify a LLDB command that will be executed when the process "
        "exits. Can be specified more than once.",
    )
    parser.add_option(
        "--terminateCommand", type="string", action="append", dest="terminateCmds",
        default=[],
        help="Specify a LLDB command that will be executed when the debugging "
        "session is terminated. Can be specified more than once.",
    )
    parser.add_option(
        "--env", type="string", action="append", dest="envs", default=[],
        help="Specify environment variables to pass to the launched process.",
    )
    parser.add_option(
        "--waitFor", action="store_true", dest="waitFor", default=False,
        help="Wait for the next process to be launched whose name matches "
        "the basename of the program specified with the --program option",
    )
    (options, args) = parser.parse_args(sys.argv[1:])

    if options.vscode_path is None and options.port is None:
        print(
            "error: must either specify a path to a Visual Studio Code "
            "Debug Adaptor vscode executable path using the --vscode "
            "option, or a port to attach to for an existing lldb-dap "
            "using the --port option"
        )
        return
    dbg = DebugAdaptorServer(executable=options.vscode_path, port=options.port)
    if options.debug:
        input('Waiting for debugger to attach pid "%i"' % (dbg.get_pid()))
    if options.replay:
        dbg.replay_packets(options.replay)
    else:
        run_vscode(dbg, args, options)
    dbg.terminate()


if __name__ == "__main__":
    main()