[Lldb-commits] [lldb] 114192f - Revert "[lldb-dap] Test Gardening, improving DebugCommunication. (#141689)"
David Spickett via lldb-commits
lldb-commits at lists.llvm.org
Fri May 30 01:49:18 PDT 2025
Author: David Spickett
Date: 2025-05-30T08:48:29Z
New Revision: 114192f586f70d1f7ac91da88061e39524c98ca3
URL: https://github.com/llvm/llvm-project/commit/114192f586f70d1f7ac91da88061e39524c98ca3
DIFF: https://github.com/llvm/llvm-project/commit/114192f586f70d1f7ac91da88061e39524c98ca3.diff
LOG: Revert "[lldb-dap] Test Gardening, improving DebugCommunication. (#141689)"
This reverts commit 8a49db35f45e56c92522c6079e51553e80c07aec.
Due to failures on Arm and AArch64 Linux:
https://lab.llvm.org/buildbot/#/builders/59/builds/18540
https://lab.llvm.org/buildbot/#/builders/18/builds/16759
File "/home/tcwg-buildbot/worker/lldb-aarch64-ubuntu/llvm-project/lldb/test/API/tools/lldb-dap/evaluate/TestDAP_evaluate.py", line 22, in assertEvaluateFailure
self.assertNotIn(
AssertionError: 'result' unexpectedly found in {'memoryReference': '0xFFFFF7CB3060', 'result': '0x0000000000000000', 'type': 'int *', 'variablesReference': 7}
FAIL: test_generic_evaluate_expressions (TestDAP_evaluate.TestDAP_evaluate)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/tcwg-buildbot/worker/lldb-aarch64-ubuntu/llvm-project/lldb/test/API/tools/lldb-dap/evaluate/TestDAP_evaluate.py", line 228, in test_generic_evaluate_expressions
self.run_test_evaluate_expressions(enableAutoVariableSummaries=False)
File "/home/tcwg-buildbot/worker/lldb-aarch64-ubuntu/llvm-project/lldb/test/API/tools/lldb-dap/evaluate/TestDAP_evaluate.py", line 117, in run_test_evaluate_expressions
self.assertEvaluateFailure("list") # local variable of a_function
File "/home/tcwg-buildbot/worker/lldb-aarch64-ubuntu/llvm-project/lldb/test/API/tools/lldb-dap/evaluate/TestDAP_evaluate.py", line 22, in assertEvaluateFailure
self.assertNotIn(
AssertionError: 'result' unexpectedly found in {'memoryReference': '0xFFFFF7CB3060', 'result': '0x0000000000000000', 'type': 'int *', 'variablesReference': 7}
Config=aarch64-/home/tcwg-buildbot/worker/lldb-aarch64-ubuntu/build/bin/clang
The second one is because our bots have the libc debug info package installed;
for the first, we have no idea.
Added:
Modified:
lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py
lldb/test/API/tools/lldb-dap/cancel/TestDAP_cancel.py
lldb/test/API/tools/lldb-dap/commands/TestDAP_commands.py
lldb/test/API/tools/lldb-dap/console/TestDAP_redirection_to_console.py
lldb/test/API/tools/lldb-dap/variables/TestDAP_variables.py
Removed:
################################################################################
diff --git a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
index 2b18ecc4056f6..4c8c51905e1d0 100644
--- a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
+++ b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
@@ -12,106 +12,14 @@
import sys
import threading
import time
-from typing import (
- IO,
- Any,
- Optional,
- Union,
- List, # required for python 3.8 compatibility
- Dict, # required for python 3.8 compatibility
- Tuple, # required for python 3.8 compatibility
- TextIO,
- TypedDict,
- Literal,
- Callable,
- TypeVar,
- cast,
- TYPE_CHECKING,
-)
-
-if TYPE_CHECKING:
- # FIXME: Add mypy and typing_extensions to the requirements.txt once all
- # build bots support the library.
- from typing_extensions import Unpack
+from typing import Any, Optional, Union, BinaryIO, TextIO
## DAP type references
-
-T = TypeVar("T")
-
-
-class Event(TypedDict):
- type: Literal["event"]
- seq: Literal[0]
- event: str
- body: Optional[dict]
-
-
-class Request(TypedDict):
- type: Literal["request"]
- seq: int
- command: str
- arguments: Optional[dict]
-
-
-class Response(TypedDict):
- type: Literal["response"]
- seq: Literal[0]
- request_seq: int
- success: bool
- command: str
- message: Optional[str]
- body: Optional[dict]
-
-
-class AttachOrLaunchArguments(TypedDict, total=False):
- stopOnEntry: bool
- disableASLR: bool
- disableSTDIO: bool
- enableAutoVariableSummaries: bool
- displayExtendedBacktrace: bool
- enableSyntheticChildDebugging: bool
- initCommands: List[str]
- preRunCommands: List[str]
- postRunCommands: List[str]
- stopCommands: List[str]
- exitCommands: List[str]
- terminateCommands: List[str]
- sourceMap: Union[List[Tuple[str, str]], Dict[str, str]]
- sourcePath: str
- debuggerRoot: str
- commandEscapePrefix: str
- customFrameFormat: str
- customThreadFormat: str
-
-
-class LaunchArguments(AttachOrLaunchArguments, total=False):
- program: str
- args: List[str]
- cwd: str
- env: Dict[str, str]
- shellExpandArguments: bool
- runInTerminal: bool
- launchCommands: List[str]
-
-
-class AttachArguments(AttachOrLaunchArguments, total=False):
- program: str
- pid: int
- waitFor: bool
- attachCommands: List[str]
- coreFile: str
- gdbRemotePort: int
- gdbRemoteHostname: str
-
-
+Event = dict[str, Any]
+Request = dict[str, Any]
+Response = dict[str, Any]
ProtocolMessage = Union[Event, Request, Response]
-# An internal type used for tracking protocol messages and an EOF sentinel
-# value. 'None' cannot easily be used as a sentinel because it is a falsy
-# value. When returned outside of DebugCommunication an EOFError is typically
-# converted into 'None'.
-_InternalProtocolMessage = Union[Event, Request, Response, EOFError]
-
def dump_memory(base_addr, data, num_per_line, outfile):
data_len = len(data)
@@ -150,42 +58,44 @@ def dump_memory(base_addr, data, num_per_line, outfile):
outfile.write("\n")
-def read_packet(
- f: IO[bytes], verbose=False, trace_file=None
-) -> _InternalProtocolMessage:
+def read_packet(f, verbose=False, trace_file=None):
"""Decode a JSON packet that starts with the content length and is
followed by the JSON bytes from a file 'f'. Returns None on EOF.
"""
- line = f.readline().decode()
+ line = f.readline().decode("utf-8")
if len(line) == 0:
- return EOFError() # EOF.
+ return None # EOF.
# Watch for line that starts with the prefix
prefix = "Content-Length: "
if line.startswith(prefix):
# Decode length of JSON bytes
if verbose:
- print("content:", line)
+ print('content: "%s"' % (line))
length = int(line[len(prefix) :])
if verbose:
- print("length:", length)
+ print('length: "%u"' % (length))
# Skip empty line
- line = f.readline().decode()
+ line = f.readline()
if verbose:
- print("empty:", line)
+ print('empty: "%s"' % (line))
# Read JSON bytes
- json_str = f.read(length).decode()
+ json_str = f.read(length)
if verbose:
- print("json:", json_str)
+ print('json: "%s"' % (json_str))
if trace_file:
- trace_file.write(f"from adapter:\n{json_str}\n")
+ trace_file.write("from adapter:\n%s\n" % (json_str))
# Decode the JSON bytes into a python dictionary
return json.loads(json_str)
raise Exception("unexpected malformed message from lldb-dap: " + line)
-def dump_dap_log(log_file: str):
+def packet_type_is(packet, packet_type):
+ return "type" in packet and packet["type"] == packet_type
+
+
+def dump_dap_log(log_file):
print("========= DEBUG ADAPTER PROTOCOL LOGS =========", file=sys.stderr)
if log_file is None:
print("no log file available", file=sys.stderr)
@@ -214,8 +124,8 @@ def __init__(
def __str__(self):
return f"Source(name={self.name}, path={self.path}), source_reference={self.source_reference})"
- def as_dict(self) -> Dict:
- source_dict: Dict[str, Any] = {}
+ def as_dict(self):
+ source_dict = {}
if self._name is not None:
source_dict["name"] = self._name
if self._path is not None:
@@ -228,48 +138,44 @@ def as_dict(self) -> Dict:
class DebugCommunication(object):
def __init__(
self,
- recv: IO[bytes],
- send: IO[bytes],
- init_commands: List[str],
- log_file: Optional[str] = None,
+ recv: BinaryIO,
+ send: BinaryIO,
+ init_commands: list[str],
+ log_file: Optional[TextIO] = None,
):
# For debugging test failures, try setting `trace_file = sys.stderr`.
self.trace_file: Optional[TextIO] = None
self.log_file = log_file
self.send = send
self.recv = recv
- self.recv_packets: List[_InternalProtocolMessage] = []
+ self.recv_packets: list[Optional[ProtocolMessage]] = []
self.recv_condition = threading.Condition()
self.recv_thread = threading.Thread(target=self._read_packet_thread)
+ self.process_event_body = None
self.exit_status: Optional[int] = None
- self.init_commands = init_commands
self.initialize_body = None
- self.initialized = False
- self.configuration_done_sent = False
- self.process_event_body: Optional[Dict] = None
- self.terminated = False
- self.progress_events: List[Event] = []
- self.reverse_requests: List[Request] = []
+ self.progress_events: list[Event] = []
+ self.reverse_requests = []
self.sequence = 1
- self.output_condition = threading.Condition()
- self.output: Dict[str, List[str]] = {}
-
- # debuggee state
self.threads = None
- self.thread_stop_reasons: Dict[str, Any] = {}
- self.frame_scopes: Dict[str, Any] = {}
-
+ self.thread_stop_reasons = {}
self.recv_thread.start()
+ self.output_condition = threading.Condition()
+ self.output: dict[str, list[str]] = {}
+ self.configuration_done_sent = False
+ self.initialized = False
+ self.frame_scopes = {}
+ self.init_commands = init_commands
@classmethod
def encode_content(cls, s: str) -> bytes:
return ("Content-Length: %u\r\n\r\n%s" % (len(s), s)).encode("utf-8")
@classmethod
- def validate_response(cls, request: Request, response: Response):
- if request["command"] != response["command"]:
+ def validate_response(cls, command, response):
+ if command["command"] != response["command"]:
raise ValueError("command mismatch in response")
- if request["seq"] != response["request_seq"]:
+ if command["seq"] != response["request_seq"]:
raise ValueError("seq mismatch in response")
def _read_packet_thread(self):
@@ -318,91 +224,84 @@ def collect_output(self, category, timeout_secs, pattern, clear=True):
break
return collected_output if collected_output else None
- def _enqueue_recv_packet(self, packet: Union[ProtocolMessage, EOFError]):
+ def _enqueue_recv_packet(self, packet: Optional[ProtocolMessage]):
+ self.recv_condition.acquire()
self.recv_packets.append(packet)
self.recv_condition.notify()
+ self.recv_condition.release()
- def _handle_recv_packet(self, packet: _InternalProtocolMessage) -> bool:
+ def _handle_recv_packet(self, packet: Optional[ProtocolMessage]) -> bool:
"""Called by the read thread that is waiting for all incoming packets
to store the incoming packet in "self.recv_packets" in a thread safe
way. This function will then signal the "self.recv_condition" to
indicate a new packet is available. Returns True if the caller
should keep calling this function for more packets.
"""
- # Hold the recv_condition for consistency of debugger state.
- with self.recv_condition:
- if isinstance(packet, EOFError):
- self._enqueue_recv_packet(packet)
- return False
-
- keep_going = True
-
- # Check the packet to see if is an event packet
- if packet["type"] == "event" and "event" in packet:
- event = packet["event"]
- body = packet.get("body")
- # Handle the event packet and cache DAP stateful information from
- # these packets as they come in.
- if event == "output" and body is not None:
- # Store any output we receive so clients can retrieve it later.
- category = body["category"]
- output = body["output"]
- self.output_condition.acquire()
- if category in self.output:
- self.output[category] += output
- else:
- self.output[category] = output
- self.output_condition.notify()
- self.output_condition.release()
- # no need to add 'output' event packets to our packets list
- return keep_going
- elif event == "initialized":
- self.initialized = True
- elif event == "process" and body is not None:
- # When a new process is attached or launched, remember the
- # details that are available in the body of the event
- self.process_event_body = body
- elif event == "terminated":
- # If we get the 'terminated' event then lldb-dap has exited
- # itself.
- self.terminated = True
- elif event == "exited" and body is not None:
- # Process exited, mark the status to indicate the process is not
- # alive.
- self.exit_status = body.get("exitCode", 0)
- elif event == "continued" and body is not None:
- # When the process continues, clear the known threads and
- # thread_stop_reasons.
- all_threads_continued = body.get("allThreadsContinued", True)
- tid = body["threadId"]
- if tid in self.thread_stop_reasons:
- del self.thread_stop_reasons[tid]
- self._process_continued(all_threads_continued)
- elif event == "stopped" and body is not None:
- # Each thread that stops with a reason will send a
- # 'stopped' event. We need to remember the thread stop
- # reasons since the 'threads' command doesn't return
- # that information.
- self._process_stopped()
- tid = body["threadId"]
- self.thread_stop_reasons[tid] = body
- elif event.startswith("progress"):
- # Progress events come in as 'progressStart', 'progressUpdate',
- # and 'progressEnd' events. Keep these around in case test
- # cases want to verify them.
- self.progress_events.append(packet)
-
- elif packet["type"] == "response":
- if packet["command"] == "disconnect":
- keep_going = False
-
- elif packet["type"] == "request":
- # Handle reverse requests and keep processing.
- self._handle_reverse_request(packet)
- return keep_going
-
- self._enqueue_recv_packet(packet)
- return keep_going
+ # If EOF, notify the read thread by enqueuing a None.
+ if not packet:
+ self._enqueue_recv_packet(None)
+ return False
+
+ # Check the packet to see if is an event packet
+ keepGoing = True
+ packet_type = packet["type"]
+ if packet_type == "event":
+ event = packet["event"]
+ body = None
+ if "body" in packet:
+ body = packet["body"]
+ # Handle the event packet and cache information from these packets
+ # as they come in
+ if event == "output":
+ # Store any output we receive so clients can retrieve it later.
+ category = body["category"]
+ output = body["output"]
+ self.output_condition.acquire()
+ if category in self.output:
+ self.output[category] += output
+ else:
+ self.output[category] = output
+ self.output_condition.notify()
+ self.output_condition.release()
+ # no need to add 'output' event packets to our packets list
+ return keepGoing
+ elif event == "initialized":
+ self.initialized = True
+ elif event == "process":
+ # When a new process is attached or launched, remember the
+ # details that are available in the body of the event
+ self.process_event_body = body
+ elif event == "exited":
+ # Process exited, mark the status to indicate the process is not
+ # alive.
+ self.exit_status = body["exitCode"]
+ elif event == "continued":
+ # When the process continues, clear the known threads and
+ # thread_stop_reasons.
+ all_threads_continued = body.get("allThreadsContinued", True)
+ tid = body["threadId"]
+ if tid in self.thread_stop_reasons:
+ del self.thread_stop_reasons[tid]
+ self._process_continued(all_threads_continued)
+ elif event == "stopped":
+ # Each thread that stops with a reason will send a
+ # 'stopped' event. We need to remember the thread stop
+ # reasons since the 'threads' command doesn't return
+ # that information.
+ self._process_stopped()
+ tid = body["threadId"]
+ self.thread_stop_reasons[tid] = body
+ elif event.startswith("progress"):
+ # Progress events come in as 'progressStart', 'progressUpdate',
+ # and 'progressEnd' events. Keep these around in case test
+ # cases want to verify them.
+ self.progress_events.append(packet)
+
+ elif packet_type == "response":
+ if packet["command"] == "disconnect":
+ keepGoing = False
+ self._enqueue_recv_packet(packet)
+ return keepGoing
def _process_continued(self, all_threads_continued: bool):
self.threads = None
@@ -410,146 +309,125 @@ def _process_continued(self, all_threads_continued: bool):
if all_threads_continued:
self.thread_stop_reasons = {}
- def _handle_reverse_request(self, request: Request):
- self.reverse_requests.append(request)
- arguments = request.get("arguments")
- if request["command"] == "runInTerminal" and arguments is not None:
- in_shell = arguments.get("argsCanBeInterpretedByShell", False)
- proc = subprocess.Popen(
- arguments["args"],
- env=arguments.get("env", {}),
- cwd=arguments["cwd"],
- stdin=subprocess.DEVNULL,
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- shell=in_shell,
- )
- body = {}
- if in_shell:
- body["shellProcessId"] = proc.pid
- else:
- body["processId"] = proc.pid
- self.send_packet(
- {
- "type": "response",
- "seq": 0,
- "request_seq": request["seq"],
- "success": True,
- "command": "runInTerminal",
- "message": None,
- "body": body,
- }
- )
- elif request["command"] == "startDebugging":
- self.send_packet(
- {
- "type": "response",
- "seq": 0,
- "request_seq": request["seq"],
- "success": True,
- "message": None,
- "command": "startDebugging",
- "body": {},
- }
- )
- else:
- desc = 'unknown reverse request "%s"' % (request["command"])
- raise ValueError(desc)
-
- def send_packet(self, packet: ProtocolMessage) -> int:
+ def send_packet(self, command_dict: Request, set_sequence=True):
"""Take the "command_dict" python dictionary and encode it as a JSON
string and send the contents as a packet to the VSCode debug
- adapter.
-
- Returns the seq of the packet."""
-
- # Set the seq for requests.
- if packet["type"] == "request":
- packet["seq"] = self.sequence
+ adapter"""
+ # Set the sequence ID for this command automatically
+ if set_sequence:
+ command_dict["seq"] = self.sequence
self.sequence += 1
- else:
- packet["seq"] = 0
-
# Encode our command dictionary as a JSON string
- json_str = json.dumps(packet, separators=(",", ":"))
-
+ json_str = json.dumps(command_dict, separators=(",", ":"))
if self.trace_file:
self.trace_file.write("to adapter:\n%s\n" % (json_str))
-
length = len(json_str)
if length > 0:
# Send the encoded JSON packet and flush the 'send' file
self.send.write(self.encode_content(json_str))
self.send.flush()
- return packet["seq"]
-
- def receive_response(
- self,
- seq: int,
- ) -> Optional[Response]:
- """Waits for the a response with the associated request_sec."""
-
- def predicate(p: Response):
- return p["type"] == "response" and p["request_seq"] == seq
-
- return self._recv_packet(predicate=predicate)
-
- def _recv_packet(
+ def recv_packet(
self,
- *,
- predicate: Callable[[T], bool],
+ filter_type: Optional[str] = None,
+ filter_event: Optional[Union[str, list[str]]] = None,
timeout: Optional[float] = None,
- ) -> Optional[T]:
+ ) -> Optional[ProtocolMessage]:
"""Get a JSON packet from the VSCode debug adapter. This function
assumes a thread that reads packets is running and will deliver
any received packets by calling handle_recv_packet(...). This
function will wait for the packet to arrive and return it when
it does."""
-
- def _predicate():
- return next(
- filter(
- lambda p: isinstance(p, EOFError) or predicate(p),
- self.recv_packets,
- ),
- None,
- )
-
- with self.recv_condition:
- packet = self.recv_condition.wait_for(_predicate, timeout=timeout)
- if packet is None: # Timeout
- return None
- self.recv_packets.remove(packet)
- if isinstance(packet, EOFError):
+ while True:
+ try:
+ self.recv_condition.acquire()
+ packet = None
+ while True:
+ for i, curr_packet in enumerate(self.recv_packets):
+ if not curr_packet:
+ raise EOFError
+ packet_type = curr_packet["type"]
+ if filter_type is None or packet_type in filter_type:
+ if filter_event is None or (
+ packet_type == "event"
+ and curr_packet["event"] in filter_event
+ ):
+ packet = self.recv_packets.pop(i)
+ break
+ if packet:
+ break
+ # Sleep until packet is received
+ len_before = len(self.recv_packets)
+ self.recv_condition.wait(timeout)
+ len_after = len(self.recv_packets)
+ if len_before == len_after:
+ return None # Timed out
+ return packet
+ except EOFError:
return None
- return packet
+ finally:
+ self.recv_condition.release()
- def _send_recv(self, command: Request) -> Optional[Response]:
+ def send_recv(self, command):
"""Send a command python dictionary as JSON and receive the JSON
response. Validates that the response is the correct sequence and
- command in the reply."""
- seq = self.send_packet(command)
- response = self.receive_response(seq)
- if response is None:
- desc = 'no response for "%s"' % (command["command"])
- raise ValueError(desc)
- self.validate_response(command, response)
- return response
+ command in the reply. Any events that are received are added to the
+ events list in this object"""
+ self.send_packet(command)
+ done = False
+ while not done:
+ response_or_request = self.recv_packet(filter_type=["response", "request"])
+ if response_or_request is None:
+ desc = 'no response for "%s"' % (command["command"])
+ raise ValueError(desc)
+ if response_or_request["type"] == "response":
+ self.validate_response(command, response_or_request)
+ return response_or_request
+ else:
+ self.reverse_requests.append(response_or_request)
+ if response_or_request["command"] == "runInTerminal":
+ subprocess.Popen(
+ response_or_request["arguments"]["args"],
+ env=response_or_request["arguments"]["env"],
+ )
+ self.send_packet(
+ {
+ "type": "response",
+ "request_seq": response_or_request["seq"],
+ "success": True,
+ "command": "runInTerminal",
+ "body": {},
+ },
+ )
+ elif response_or_request["command"] == "startDebugging":
+ self.send_packet(
+ {
+ "type": "response",
+ "request_seq": response_or_request["seq"],
+ "success": True,
+ "command": "startDebugging",
+ "body": {},
+ },
+ )
+ else:
+ desc = 'unknown reverse request "%s"' % (
+ response_or_request["command"]
+ )
+ raise ValueError(desc)
+
+ return None
def wait_for_event(
- self, filter: List[str] = [], timeout: Optional[float] = None
+ self, filter: Union[str, list[str]], timeout: Optional[float] = None
) -> Optional[Event]:
"""Wait for the first event that matches the filter."""
-
- def predicate(p: Event):
- return p["type"] == "event" and p["event"] in filter
-
- return self._recv_packet(predicate=predicate, timeout=timeout)
+ return self.recv_packet(
+ filter_type="event", filter_event=filter, timeout=timeout
+ )
def wait_for_stopped(
self, timeout: Optional[float] = None
- ) -> Optional[List[Event]]:
+ ) -> Optional[list[Event]]:
stopped_events = []
stopped_event = self.wait_for_event(
filter=["stopped", "exited"], timeout=timeout
@@ -568,24 +446,22 @@ def wait_for_stopped(
return stopped_events
def wait_for_breakpoint_events(self, timeout: Optional[float] = None):
- breakpoint_events: List[Event] = []
+ breakpoint_events: list[Event] = []
while True:
- event = self.wait_for_event(["breakpoint"], timeout=timeout)
+ event = self.wait_for_event("breakpoint", timeout=timeout)
if not event:
break
breakpoint_events.append(event)
return breakpoint_events
def wait_for_exited(self, timeout: Optional[float] = None):
- event_dict = self.wait_for_event(["exited"], timeout=timeout)
+ event_dict = self.wait_for_event("exited", timeout=timeout)
if event_dict is None:
raise ValueError("didn't get exited event")
return event_dict
def wait_for_terminated(self, timeout: Optional[float] = None):
- if self.terminated:
- raise ValueError("already terminated")
- event_dict = self.wait_for_event(["terminated"], timeout)
+ event_dict = self.wait_for_event("terminated", timeout)
if event_dict is None:
raise ValueError("didn't get terminated event")
return event_dict
@@ -724,7 +600,7 @@ def replay_packets(self, replay_file_path):
raise ValueError("decode packet failed from replay file")
print("Sending:")
pprint.PrettyPrinter(indent=2).pprint(command_dict)
- # input('Press ENTER to send:')
+ # raw_input('Press ENTER to send:')
self.send_packet(command_dict, set_sequence)
mode = "invalid"
elif mode == "recv":
@@ -744,21 +620,59 @@ def replay_packets(self, replay_file_path):
print("error: didn't get a valid response")
mode = "invalid"
- def request_attach(self, **kwargs: "Unpack[AttachArguments]"):
- # Remove any default (empty) values.
- attach_args = cast(AttachArguments, {k: v for k, v in kwargs.items() if v})
- attach_args.setdefault("initCommands", [])
- attach_args["initCommands"] = [
- *self.init_commands,
- *attach_args["initCommands"],
- ]
- command_dict: Request = {
- "command": "attach",
- "type": "request",
- "seq": 0,
- "arguments": cast(dict, attach_args),
- }
- return self._send_recv(command_dict)
+ def request_attach(
+ self,
+ *,
+ program: Optional[str] = None,
+ pid: Optional[int] = None,
+ waitFor=False,
+ initCommands: Optional[list[str]] = None,
+ preRunCommands: Optional[list[str]] = None,
+ attachCommands: Optional[list[str]] = None,
+ postRunCommands: Optional[list[str]] = None,
+ stopCommands: Optional[list[str]] = None,
+ exitCommands: Optional[list[str]] = None,
+ terminateCommands: Optional[list[str]] = None,
+ coreFile: Optional[str] = None,
+ stopOnEntry=False,
+ sourceMap: Optional[Union[list[tuple[str, str]], dict[str, str]]] = None,
+ gdbRemotePort: Optional[int] = None,
+ gdbRemoteHostname: Optional[str] = None,
+ ):
+ args_dict = {}
+ if pid is not None:
+ args_dict["pid"] = pid
+ if program is not None:
+ args_dict["program"] = program
+ if waitFor:
+ args_dict["waitFor"] = waitFor
+ args_dict["initCommands"] = self.init_commands
+ if initCommands:
+ args_dict["initCommands"].extend(initCommands)
+ if preRunCommands:
+ args_dict["preRunCommands"] = preRunCommands
+ if stopCommands:
+ args_dict["stopCommands"] = stopCommands
+ if exitCommands:
+ args_dict["exitCommands"] = exitCommands
+ if terminateCommands:
+ args_dict["terminateCommands"] = terminateCommands
+ if attachCommands:
+ args_dict["attachCommands"] = attachCommands
+ if coreFile:
+ args_dict["coreFile"] = coreFile
+ if stopOnEntry:
+ args_dict["stopOnEntry"] = stopOnEntry
+ if postRunCommands:
+ args_dict["postRunCommands"] = postRunCommands
+ if sourceMap:
+ args_dict["sourceMap"] = sourceMap
+ if gdbRemotePort is not None:
+ args_dict["gdb-remote-port"] = gdbRemotePort
+ if gdbRemoteHostname is not None:
+ args_dict["gdb-remote-hostname"] = gdbRemoteHostname
+ command_dict = {"command": "attach", "type": "request", "arguments": args_dict}
+ return self.send_recv(command_dict)
def request_breakpointLocations(
self, file_path, line, end_line=None, column=None, end_column=None
@@ -780,7 +694,7 @@ def request_breakpointLocations(
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_configurationDone(self):
command_dict = {
@@ -788,7 +702,7 @@ def request_configurationDone(self):
"type": "request",
"arguments": {},
}
- response = self._send_recv(command_dict)
+ response = self.send_recv(command_dict)
if response:
self.configuration_done_sent = True
self.request_threads()
@@ -817,7 +731,7 @@ def request_continue(self, threadId=None, singleThread=False):
"type": "request",
"arguments": args_dict,
}
- response = self._send_recv(command_dict)
+ response = self.send_recv(command_dict)
if response["success"]:
self._process_continued(response["body"]["allThreadsContinued"])
# Caller must still call wait_for_stopped.
@@ -831,24 +745,23 @@ def request_restart(self, restartArguments=None):
if restartArguments:
command_dict["arguments"] = restartArguments
- response = self._send_recv(command_dict)
+ response = self.send_recv(command_dict)
# Caller must still call wait_for_stopped.
return response
- def request_disconnect(
- self,
- terminateDebuggee: Optional[bool] = None,
- ):
+ def request_disconnect(self, terminateDebuggee=None):
args_dict = {}
if terminateDebuggee is not None:
- args_dict["terminateDebuggee"] = terminateDebuggee
- command_dict: Request = {
+ if terminateDebuggee:
+ args_dict["terminateDebuggee"] = True
+ else:
+ args_dict["terminateDebuggee"] = False
+ command_dict = {
"command": "disconnect",
"type": "request",
- "seq": 0,
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_disassemble(
self,
@@ -868,7 +781,7 @@ def request_disassemble(
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)["body"]["instructions"]
+ return self.send_recv(command_dict)["body"]["instructions"]
def request_readMemory(self, memoryReference, offset, count):
args_dict = {
@@ -881,7 +794,7 @@ def request_readMemory(self, memoryReference, offset, count):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_evaluate(self, expression, frameIndex=0, threadId=None, context=None):
stackFrame = self.get_stackFrame(frameIndex=frameIndex, threadId=threadId)
@@ -897,7 +810,7 @@ def request_evaluate(self, expression, frameIndex=0, threadId=None, context=None
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_exceptionInfo(self, threadId=None):
if threadId is None:
@@ -908,7 +821,7 @@ def request_exceptionInfo(self, threadId=None):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_initialize(self, sourceInitFile=False):
command_dict = {
@@ -929,35 +842,97 @@ def request_initialize(self, sourceInitFile=False):
"$__lldb_sourceInitFile": sourceInitFile,
},
}
- response = self._send_recv(command_dict)
+ response = self.send_recv(command_dict)
if response:
if "body" in response:
self.initialize_body = response["body"]
return response
- def request_launch(self, **kwargs: "Unpack[LaunchArguments]"):
- # Remove any default (empty) values.
- launch_args = cast(LaunchArguments, {k: v for k, v in kwargs.items() if v})
- launch_args.setdefault("initCommands", [])
- launch_args["initCommands"] = [
- *self.init_commands,
- *launch_args["initCommands"],
- ]
-
- command_dict: Request = {
- "type": "request",
- "seq": 0,
- "command": "launch",
- "arguments": cast(dict, kwargs),
- }
- return self._send_recv(command_dict)
+ def request_launch(
+ self,
+ program: str,
+ *,
+ args: Optional[list[str]] = None,
+ cwd: Optional[str] = None,
+ env: Optional[dict[str, str]] = None,
+ stopOnEntry=False,
+ disableASLR=False,
+ disableSTDIO=False,
+ shellExpandArguments=False,
+ runInTerminal=False,
+ enableAutoVariableSummaries=False,
+ displayExtendedBacktrace=False,
+ enableSyntheticChildDebugging=False,
+ initCommands: Optional[list[str]] = None,
+ preRunCommands: Optional[list[str]] = None,
+ launchCommands: Optional[list[str]] = None,
+ postRunCommands: Optional[list[str]] = None,
+ stopCommands: Optional[list[str]] = None,
+ exitCommands: Optional[list[str]] = None,
+ terminateCommands: Optional[list[str]] = None,
+ sourceMap: Optional[Union[list[tuple[str, str]], dict[str, str]]] = None,
+ sourcePath: Optional[str] = None,
+ debuggerRoot: Optional[str] = None,
+ commandEscapePrefix: Optional[str] = None,
+ customFrameFormat: Optional[str] = None,
+ customThreadFormat: Optional[str] = None,
+ ):
+ args_dict = {"program": program}
+ if args:
+ args_dict["args"] = args
+ if cwd:
+ args_dict["cwd"] = cwd
+ if env:
+ args_dict["env"] = env
+ if stopOnEntry:
+ args_dict["stopOnEntry"] = stopOnEntry
+ if disableSTDIO:
+ args_dict["disableSTDIO"] = disableSTDIO
+ if shellExpandArguments:
+ args_dict["shellExpandArguments"] = shellExpandArguments
+ args_dict["initCommands"] = self.init_commands
+ if initCommands:
+ args_dict["initCommands"].extend(initCommands)
+ if preRunCommands:
+ args_dict["preRunCommands"] = preRunCommands
+ if stopCommands:
+ args_dict["stopCommands"] = stopCommands
+ if exitCommands:
+ args_dict["exitCommands"] = exitCommands
+ if terminateCommands:
+ args_dict["terminateCommands"] = terminateCommands
+ if sourcePath:
+ args_dict["sourcePath"] = sourcePath
+ if debuggerRoot:
+ args_dict["debuggerRoot"] = debuggerRoot
+ if launchCommands:
+ args_dict["launchCommands"] = launchCommands
+ if sourceMap:
+ args_dict["sourceMap"] = sourceMap
+ if runInTerminal:
+ args_dict["runInTerminal"] = runInTerminal
+ if postRunCommands:
+ args_dict["postRunCommands"] = postRunCommands
+ if customFrameFormat:
+ args_dict["customFrameFormat"] = customFrameFormat
+ if customThreadFormat:
+ args_dict["customThreadFormat"] = customThreadFormat
+
+ args_dict["disableASLR"] = disableASLR
+ args_dict["enableAutoVariableSummaries"] = enableAutoVariableSummaries
+ args_dict["enableSyntheticChildDebugging"] = enableSyntheticChildDebugging
+ args_dict["displayExtendedBacktrace"] = displayExtendedBacktrace
+ if commandEscapePrefix is not None:
+ args_dict["commandEscapePrefix"] = commandEscapePrefix
+ command_dict = {"command": "launch", "type": "request", "arguments": args_dict}
+ return self.send_recv(command_dict)
def request_next(self, threadId, granularity="statement"):
if self.exit_status is not None:
raise ValueError("request_continue called after process exited")
args_dict = {"threadId": threadId, "granularity": granularity}
command_dict = {"command": "next", "type": "request", "arguments": args_dict}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_stepIn(self, threadId, targetId, granularity="statement"):
if self.exit_status is not None:
@@ -970,7 +945,7 @@ def request_stepIn(self, threadId, targetId, granularity="statement"):
"granularity": granularity,
}
command_dict = {"command": "stepIn", "type": "request", "arguments": args_dict}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_stepInTargets(self, frameId):
if self.exit_status is not None:
@@ -981,14 +956,14 @@ def request_stepInTargets(self, frameId):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_stepOut(self, threadId):
if self.exit_status is not None:
raise ValueError("request_stepOut called after process exited")
args_dict = {"threadId": threadId}
command_dict = {"command": "stepOut", "type": "request", "arguments": args_dict}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_pause(self, threadId=None):
if self.exit_status is not None:
@@ -997,12 +972,12 @@ def request_pause(self, threadId=None):
threadId = self.get_thread_id()
args_dict = {"threadId": threadId}
command_dict = {"command": "pause", "type": "request", "arguments": args_dict}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_scopes(self, frameId):
args_dict = {"frameId": frameId}
command_dict = {"command": "scopes", "type": "request", "arguments": args_dict}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_setBreakpoints(self, source: Source, line_array, data=None):
"""data is array of parameters for breakpoints in line_array.
@@ -1033,13 +1008,12 @@ def request_setBreakpoints(self, source: Source, line_array, data=None):
breakpoints.append(bp)
args_dict["breakpoints"] = breakpoints
- command_dict: Request = {
+ command_dict = {
"command": "setBreakpoints",
"type": "request",
- "seq": 0,
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_setExceptionBreakpoints(self, filters):
args_dict = {"filters": filters}
@@ -1048,7 +1022,7 @@ def request_setExceptionBreakpoints(self, filters):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_setFunctionBreakpoints(self, names, condition=None, hitCondition=None):
breakpoints = []
@@ -1065,7 +1039,7 @@ def request_setFunctionBreakpoints(self, names, condition=None, hitCondition=Non
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_dataBreakpointInfo(
self, variablesReference, name, frameIndex=0, threadId=None
@@ -1083,7 +1057,7 @@ def request_dataBreakpointInfo(
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_setDataBreakpoint(self, dataBreakpoints):
"""dataBreakpoints is a list of dictionary with following fields:
@@ -1100,7 +1074,7 @@ def request_setDataBreakpoint(self, dataBreakpoints):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_compileUnits(self, moduleId):
args_dict = {"moduleId": moduleId}
@@ -1109,7 +1083,7 @@ def request_compileUnits(self, moduleId):
"type": "request",
"arguments": args_dict,
}
- response = self._send_recv(command_dict)
+ response = self.send_recv(command_dict)
return response
def request_completions(self, text, frameId=None):
@@ -1121,10 +1095,10 @@ def request_completions(self, text, frameId=None):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_modules(self):
- return self._send_recv({"command": "modules", "type": "request"})
+ return self.send_recv({"command": "modules", "type": "request"})
def request_stackTrace(
self, threadId=None, startFrame=None, levels=None, format=None, dump=False
@@ -1143,7 +1117,7 @@ def request_stackTrace(
"type": "request",
"arguments": args_dict,
}
- response = self._send_recv(command_dict)
+ response = self.send_recv(command_dict)
if dump:
for idx, frame in enumerate(response["body"]["stackFrames"]):
name = frame["name"]
@@ -1169,7 +1143,7 @@ def request_source(self, sourceReference):
"sourceReference": sourceReference,
},
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_threads(self):
"""Request a list of all threads and combine any information from any
@@ -1177,7 +1151,7 @@ def request_threads(self):
thread actually stopped. Returns an array of thread dictionaries
with information about all threads"""
command_dict = {"command": "threads", "type": "request", "arguments": {}}
- response = self._send_recv(command_dict)
+ response = self.send_recv(command_dict)
body = response["body"]
# Fill in "self.threads" correctly so that clients that call
# self.get_threads() or self.get_thread_id(...) can get information
@@ -1214,7 +1188,7 @@ def request_variables(
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_setVariable(self, containingVarRef, name, value, id=None):
args_dict = {
@@ -1229,7 +1203,7 @@ def request_setVariable(self, containingVarRef, name, value, id=None):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_locations(self, locationReference):
args_dict = {
@@ -1240,7 +1214,7 @@ def request_locations(self, locationReference):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def request_testGetTargetBreakpoints(self):
"""A request packet used in the LLDB test suite to get all currently
@@ -1252,7 +1226,7 @@ def request_testGetTargetBreakpoints(self):
"type": "request",
"arguments": {},
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
def terminate(self):
self.send.close()
@@ -1272,7 +1246,7 @@ def request_setInstructionBreakpoints(self, memory_reference=[]):
"type": "request",
"arguments": args_dict,
}
- return self._send_recv(command_dict)
+ return self.send_recv(command_dict)
class DebugAdapterServer(DebugCommunication):
@@ -1280,11 +1254,11 @@ def __init__(
self,
executable: Optional[str] = None,
connection: Optional[str] = None,
- init_commands: List[str] = [],
- log_file: Optional[str] = None,
- env: Optional[Dict[str, str]] = None,
+ init_commands: list[str] = [],
+ log_file: Optional[TextIO] = None,
+ env: Optional[dict[str, str]] = None,
):
- self.process: Optional[subprocess.Popen[bytes]] = None
+ self.process = None
self.connection = None
if executable is not None:
process, connection = DebugAdapterServer.launch(
@@ -1308,7 +1282,7 @@ def __init__(
self, s.makefile("rb"), s.makefile("wb"), init_commands, log_file
)
self.connection = connection
- elif self.process and self.process.stdout and self.process.stdin:
+ else:
DebugCommunication.__init__(
self, self.process.stdout, self.process.stdin, init_commands, log_file
)
@@ -1318,10 +1292,10 @@ def launch(
cls,
*,
executable: str,
- env: Optional[Dict[str, str]] = None,
- log_file: Optional[str] = None,
+ env: Optional[dict[str, str]] = None,
+ log_file: Optional[TextIO] = None,
connection: Optional[str] = None,
- ) -> Tuple[subprocess.Popen[bytes], Optional[str]]:
+ ) -> tuple[subprocess.Popen, Optional[str]]:
adapter_env = os.environ.copy()
if env is not None:
adapter_env.update(env)
@@ -1345,8 +1319,6 @@ def launch(
if connection is None:
return (process, None)
- assert process.stdout
-
# lldb-dap will print the listening address once the listener is
# made to stdout. The listener is formatted like
# `connection://host:port` or `unix-connection:///path`.
@@ -1361,9 +1333,7 @@ def launch(
)
# If the listener expanded into multiple addresses, use the first.
- if out.startswith(expected_prefix):
- out = out[len(expected_prefix) :]
- connection = out.rstrip("\r\n").split(",", 1)[0]
+ connection = out.removeprefix(expected_prefix).rstrip("\r\n").split(",", 1)[0]
return (process, connection)
@@ -1726,7 +1696,7 @@ def main():
executable=options.vscode_path, connection=options.connection
)
if options.debug:
- input('Waiting for debugger to attach pid "%i"' % (dbg.get_pid()))
+ raw_input('Waiting for debugger to attach pid "%i"' % (dbg.get_pid()))
if options.replay:
dbg.replay_packets(options.replay)
else:
diff --git a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py
index 5a1736fcbbf0b..91ae55977046b 100644
--- a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py
+++ b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py
@@ -1,20 +1,10 @@
import os
import time
-from typing import Dict, Optional, TYPE_CHECKING
+from typing import Optional
import uuid
-if TYPE_CHECKING:
- # FIXME: Add mypy and typing_extensions to the requirements.txt once all
- # build bots support the library.
- from typing_extensions import Unpack
-
-from dap_server import (
- DebugAdapterServer,
- Source,
- Response,
- AttachArguments,
- LaunchArguments,
-)
+import dap_server
+from dap_server import Source
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbplatformutil
import lldbgdbserverutils
@@ -28,7 +18,7 @@ class DAPTestCaseBase(TestBase):
def create_debug_adapter(
self,
- env: Optional[Dict[str, str]] = None,
+ lldbDAPEnv: Optional[dict[str, str]] = None,
connection: Optional[str] = None,
):
"""Create the Visual Studio Code debug adapter"""
@@ -36,21 +26,21 @@ def create_debug_adapter(
is_exe(self.lldbDAPExec), "lldb-dap must exist and be executable"
)
log_file_path = self.getBuildArtifact("dap.txt")
- self.dap_server = DebugAdapterServer(
+ self.dap_server = dap_server.DebugAdapterServer(
executable=self.lldbDAPExec,
connection=connection,
init_commands=self.setUpCommands(),
log_file=log_file_path,
- env=env,
+ env=lldbDAPEnv,
)
def build_and_create_debug_adapter(
self,
- adapter_env: Optional[Dict[str, str]] = None,
- dictionary: Optional[Dict] = None,
+ lldbDAPEnv: Optional[dict[str, str]] = None,
+ dictionary: Optional[dict] = None,
):
self.build(dictionary=dictionary)
- self.create_debug_adapter(adapter_env)
+ self.create_debug_adapter(lldbDAPEnv)
def build_and_create_debug_adapter_for_attach(self):
"""Variant of build_and_create_debug_adapter that builds a uniquely
@@ -114,18 +104,6 @@ def waitUntil(self, condition_callback):
time.sleep(0.5)
return False
- def assertResponseSuccess(self, response: Response):
- self.assertIsNotNone(response)
- self.assertIn("success", response)
- if not response.get("success", False):
- cmd = response.get("command", "<not set>")
- msg = f"command ({cmd}) failed"
- if "message" in response:
- msg += " " + str(response["message"])
- if "body" in response and response["body"] and "error" in response["body"]:
- msg += " " + str(response["body"]["error"]["format"])
- self.fail(msg)
-
def verify_breakpoint_hit(self, breakpoint_ids, timeout=DEFAULT_TIMEOUT):
"""Wait for the process we are debugging to stop, and verify we hit
any breakpoint location in the "breakpoint_ids" array.
@@ -403,7 +381,7 @@ def attach(
disconnectAutomatically=True,
sourceInitFile=False,
expectFailure=False,
- **kwargs: "Unpack[AttachArguments]",
+ **kwargs,
):
"""Build the default Makefile target, create the DAP debug adapter,
and attach to the process.
@@ -430,13 +408,12 @@ def cleanup():
def launch(
self,
- program: str,
- /,
+ program=None,
*,
sourceInitFile=False,
disconnectAutomatically=True,
expectFailure=False,
- **kwargs: "Unpack[LaunchArguments]",
+ **kwargs,
):
"""Sending launch request to dap"""
@@ -452,8 +429,7 @@ def cleanup():
# Initialize and launch the program
self.dap_server.request_initialize(sourceInitFile)
- kwargs["program"] = program
- response = self.dap_server.request_launch(**kwargs)
+ response = self.dap_server.request_launch(program, **kwargs)
if expectFailure:
return response
if not (response and response["success"]):
@@ -464,17 +440,17 @@ def cleanup():
def build_and_launch(
self,
- program: str,
- /,
+ program,
*,
- adapter_env: Optional[Dict[str, str]] = None,
- **kwargs: "Unpack[LaunchArguments]",
+ lldbDAPEnv: Optional[dict[str, str]] = None,
+ **kwargs,
):
"""Build the default Makefile target, create the DAP debug adapter,
and launch the process.
"""
- self.build_and_create_debug_adapter(adapter_env)
+ self.build_and_create_debug_adapter(lldbDAPEnv)
self.assertTrue(os.path.exists(program), "executable must exist")
+
return self.launch(program, **kwargs)
def getBuiltinDebugServerTool(self):
diff --git a/lldb/test/API/tools/lldb-dap/cancel/TestDAP_cancel.py b/lldb/test/API/tools/lldb-dap/cancel/TestDAP_cancel.py
index c183a465d53f2..824ed8fe3bb97 100644
--- a/lldb/test/API/tools/lldb-dap/cancel/TestDAP_cancel.py
+++ b/lldb/test/API/tools/lldb-dap/cancel/TestDAP_cancel.py
@@ -11,14 +11,15 @@
class TestDAP_cancel(lldbdap_testcase.DAPTestCaseBase):
def send_async_req(self, command: str, arguments={}) -> int:
- return self.dap_server.send_packet(
+ seq = self.dap_server.sequence
+ self.dap_server.send_packet(
{
"type": "request",
- "seq": 0,
"command": command,
"arguments": arguments,
}
)
+ return seq
def async_blocking_request(self, duration: float) -> int:
"""
@@ -53,17 +54,21 @@ def test_pending_request(self):
pending_seq = self.async_blocking_request(duration=self.DEFAULT_TIMEOUT / 2)
cancel_seq = self.async_cancel(requestId=pending_seq)
- blocking_resp = self.dap_server.receive_response(blocking_seq)
- self.assertResponseSuccess(blocking_resp)
+ blocking_resp = self.dap_server.recv_packet(filter_type=["response"])
+ self.assertEqual(blocking_resp["request_seq"], blocking_seq)
+ self.assertEqual(blocking_resp["command"], "evaluate")
+ self.assertEqual(blocking_resp["success"], True)
- pending_resp = self.dap_server.receive_response(pending_seq)
+ pending_resp = self.dap_server.recv_packet(filter_type=["response"])
self.assertEqual(pending_resp["request_seq"], pending_seq)
self.assertEqual(pending_resp["command"], "evaluate")
self.assertEqual(pending_resp["success"], False)
self.assertEqual(pending_resp["message"], "cancelled")
- cancel_resp = self.dap_server.receive_response(cancel_seq)
- self.assertResponseSuccess(cancel_resp)
+ cancel_resp = self.dap_server.recv_packet(filter_type=["response"])
+ self.assertEqual(cancel_resp["request_seq"], cancel_seq)
+ self.assertEqual(cancel_resp["command"], "cancel")
+ self.assertEqual(cancel_resp["success"], True)
self.continue_to_exit()
def test_inflight_request(self):
@@ -81,12 +86,14 @@ def test_inflight_request(self):
)
cancel_seq = self.async_cancel(requestId=blocking_seq)
- blocking_resp = self.dap_server.receive_response(blocking_seq)
+ blocking_resp = self.dap_server.recv_packet(filter_type=["response"])
self.assertEqual(blocking_resp["request_seq"], blocking_seq)
self.assertEqual(blocking_resp["command"], "evaluate")
self.assertEqual(blocking_resp["success"], False)
self.assertEqual(blocking_resp["message"], "cancelled")
- cancel_resp = self.dap_server.receive_response(cancel_seq)
- self.assertResponseSuccess(cancel_resp)
+ cancel_resp = self.dap_server.recv_packet(filter_type=["response"])
+ self.assertEqual(cancel_resp["request_seq"], cancel_seq)
+ self.assertEqual(cancel_resp["command"], "cancel")
+ self.assertEqual(cancel_resp["success"], True)
self.continue_to_exit()
diff --git a/lldb/test/API/tools/lldb-dap/commands/TestDAP_commands.py b/lldb/test/API/tools/lldb-dap/commands/TestDAP_commands.py
index 10edce3620fcf..ea6b2ea7f28ab 100644
--- a/lldb/test/API/tools/lldb-dap/commands/TestDAP_commands.py
+++ b/lldb/test/API/tools/lldb-dap/commands/TestDAP_commands.py
@@ -44,10 +44,10 @@ def do_test_abort_on_error(
commands = ["?!" + command_quiet, "!" + command_abort_on_error]
self.build_and_launch(
program,
- initCommands=commands if use_init_commands else [],
- launchCommands=commands if use_launch_commands else [],
- preRunCommands=commands if use_pre_run_commands else [],
- postRunCommands=commands if use_post_run_commands else [],
+ initCommands=commands if use_init_commands else None,
+ launchCommands=commands if use_launch_commands else None,
+ preRunCommands=commands if use_pre_run_commands else None,
+ postRunCommands=commands if use_post_run_commands else None,
expectFailure=True,
)
full_output = self.collect_console(
diff --git a/lldb/test/API/tools/lldb-dap/console/TestDAP_redirection_to_console.py b/lldb/test/API/tools/lldb-dap/console/TestDAP_redirection_to_console.py
index b6af471146338..e367c327d4295 100644
--- a/lldb/test/API/tools/lldb-dap/console/TestDAP_redirection_to_console.py
+++ b/lldb/test/API/tools/lldb-dap/console/TestDAP_redirection_to_console.py
@@ -16,7 +16,7 @@ def test(self):
"""
program = self.getBuildArtifact("a.out")
self.build_and_launch(
- program, adapter_env={"LLDB_DAP_TEST_STDOUT_STDERR_REDIRECTION": "1"}
+ program, lldbDAPEnv={"LLDB_DAP_TEST_STDOUT_STDERR_REDIRECTION": ""}
)
source = "main.cpp"
diff --git a/lldb/test/API/tools/lldb-dap/variables/TestDAP_variables.py b/lldb/test/API/tools/lldb-dap/variables/TestDAP_variables.py
index 3a7921007c4d2..340be0b39010d 100644
--- a/lldb/test/API/tools/lldb-dap/variables/TestDAP_variables.py
+++ b/lldb/test/API/tools/lldb-dap/variables/TestDAP_variables.py
@@ -104,7 +104,7 @@ def verify_variables(self, verify_dict, variables, varref_dict=None):
)
self.verify_values(verify_dict[name], variable, varref_dict)
- def darwin_dwarf_missing_obj(self, initCommands=[]):
+ def darwin_dwarf_missing_obj(self, initCommands):
self.build(debug_info="dwarf")
program = self.getBuildArtifact("a.out")
main_obj = self.getBuildArtifact("main.o")
@@ -791,13 +791,13 @@ def test_darwin_dwarf_missing_obj(self):
"""
Test that if we build a binary with DWARF in .o files and we remove
the .o file for main.cpp, that we get a variable named "<error>"
- whose value matches the appropriate error. Errors when getting
+ whose value matches the appriopriate error. Errors when getting
variables are returned in the LLDB API when the user should be
notified of issues that can easily be solved by rebuilding or
changing compiler options and are designed to give better feedback
to the user.
"""
- self.darwin_dwarf_missing_obj([])
+ self.darwin_dwarf_missing_obj(None)
@no_debug_info_test
@skipUnlessDarwin
More information about the lldb-commits
mailing list