diff --git a/.vscode/launch.json b/.vscode/launch.json index bba9a2f55..1b91fbb9f 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -11,7 +11,7 @@ "console": "integratedTerminal", "consoleTitle": "ptvsd.adapter", "program": "${workspaceFolder}/src/ptvsd/adapter", - "args": ["--port", "8765", "--cls"], + "args": ["--port", "8765", "--log-stderr"], "customDebugger": true, }, diff --git a/clean.cmd b/clean.cmd index 5339bf3ff..7c56212e0 100644 --- a/clean.cmd +++ b/clean.cmd @@ -3,7 +3,7 @@ pushd %~dp0 del /s /q *.pyc del /s /q *.pyo -for /d /r %%i in (__pycache__.*) do rd %%i +for /d /r %%i in (__pycache__.*) do rd "%%i" popd pushd %~dp0\src diff --git a/src/ptvsd/adapter/__main__.py b/src/ptvsd/adapter/__main__.py index 687caafe4..dca80e859 100644 --- a/src/ptvsd/adapter/__main__.py +++ b/src/ptvsd/adapter/__main__.py @@ -20,32 +20,24 @@ def main(args): from ptvsd.common import log, options as common_options from ptvsd.adapter import session, options as adapter_options - if args.cls: - sys.stderr.write("\033c") if args.log_stderr: + log.stderr_levels |= set(log.LEVELS) adapter_options.log_stderr = True if args.log_dir is not None: common_options.log_dir = args.log_dir - log.filename_prefix = "ptvsd.adapter" - log.stderr_levels |= {"info"} - log.to_file() + log.to_file(prefix="ptvsd.adapter") log.describe_environment("ptvsd.adapter startup environment:") session = session.Session() if args.port is None: session.connect_to_ide() else: - # If in debugServer mode, log everything to stderr. - log.stderr_levels |= set(log.LEVELS) - if args.for_server_on_port is not None: session.connect_to_server(("127.0.0.1", args.for_server_on_port)) with session.accept_connection_from_ide((args.host, args.port)) as (_, port): - try: + if session.server: session.server.set_debugger_property({"adapterPort": port}) - except AttributeError: - pass session.wait_for_completion() @@ -76,10 +68,6 @@ def _parse_argv(argv): help=argparse.SUPPRESS ) - parser.add_argument( - "--cls", action="store_true", help="clear screen before starting the debuggee" - ) - parser.add_argument( "--log-dir", type=str, diff --git a/src/ptvsd/adapter/ide.py b/src/ptvsd/adapter/ide.py index 35078a43a..21f0a7052 100644 --- a/src/ptvsd/adapter/ide.py +++ b/src/ptvsd/adapter/ide.py @@ -208,6 +208,8 @@ def launch_request(self, request): args = ["-c"] + request( "code", json.array(unicode, vectorize=True, size=(1,)) ) + else: + args = [] args += request("args", json.array(unicode)) console = request( @@ -232,12 +234,13 @@ def attach_request(self, request): if self.session.no_debug: raise request.isnt_valid('"noDebug" is not supported for "attach"') - - pid = request("processId", int, optional=True) if pid == (): - if self.server is not None: - # we are already connected to the debug server + # When the adapter is spawned by the debug server, it is connected to the + # latter from the get go, and "host" and "port" in the "attach" request + # are actually the host and port on which the adapter itself was listening, + # so we can ignore those. 
+ if self.server: return host = request("host", "127.0.0.1") @@ -248,8 +251,11 @@ def attach_request(self, request): else: self.session.connect_to_server((host, port)) else: - if self.server is not None: - raise request.isnt_valid("Session is already started") + if self.server: + raise request.isnt_valid( + '"attach" with "processId" cannot be serviced by adapter ' + "that is already associated with a debug server" + ) ptvsd_args = request("ptvsdArgs", json.array(unicode)) self.session.inject_server(pid, ptvsd_args) diff --git a/src/ptvsd/adapter/session.py b/src/ptvsd/adapter/session.py index 7f9a273a9..cc9ad2552 100644 --- a/src/ptvsd/adapter/session.py +++ b/src/ptvsd/adapter/session.py @@ -14,7 +14,15 @@ import ptvsd import ptvsd.launcher -from ptvsd.common import compat, fmt, log, messaging, options as common_options, sockets, util +from ptvsd.common import ( + compat, + fmt, + log, + messaging, + options as common_options, + sockets, + util, +) from ptvsd.adapter import components, ide, launcher, options as adapter_options, server @@ -126,6 +134,7 @@ def connect_to_ide(self): """Sets up a DAP message channel to the IDE over stdio. """ + log.info("{0} connecting to IDE over stdio...", self) stream = messaging.JsonIOStream.from_stdio() # Make sure that nothing else tries to interfere with the stdio streams @@ -143,6 +152,7 @@ def connect_to_server(self, address): """ host, port = address + log.info("{0} connecting to Server on {1}:{2}...", self, host, port) sock = sockets.create_client() sock.connect(address) @@ -208,7 +218,9 @@ def spawn_launcher(): with self._accept_connection_from_launcher() as (_, launcher_port): env[str("PTVSD_LAUNCHER_PORT")] = str(launcher_port) if common_options.log_dir is not None: - env[str("PTVSD_LOG_DIR")] = compat.filename_str(common_options.log_dir) + env[str("PTVSD_LOG_DIR")] = compat.filename_str( + common_options.log_dir + ) if adapter_options.log_stderr: env[str("PTVSD_LOG_STDERR")] = str("debug info warning error") if console == "internalConsole": @@ -369,7 +381,7 @@ def _finalize(self, why, terminate_debuggee): # Tell the IDE that debugging is over, but don't close the channel until it # tells us to, via the "disconnect" request. - if self.ide.is_connected: + if self.ide and self.ide.is_connected: try: self.ide.channel.send_event("terminated") except Exception: diff --git a/src/ptvsd/common/log.py b/src/ptvsd/common/log.py index 9db70603b..365564e3b 100644 --- a/src/ptvsd/common/log.py +++ b/src/ptvsd/common/log.py @@ -22,47 +22,115 @@ """Logging levels, lowest to highest importance. """ -stderr = sys.__stderr__ - stderr_levels = set(os.getenv("PTVSD_LOG_STDERR", "warning error").split()) """What should be logged to stderr. """ -file_levels = set(LEVELS) -"""What should be logged to file, when it is not None. +timestamp_format = "09.3f" +"""Format spec used for timestamps. Can be changed to dial precision up or down. """ -filename_prefix = "ptvsd" -"""Prefix for log file names that are automatically generated by to_file(). 
-""" +_lock = threading.RLock() +_tls = threading.local() +_files = {} # filename -> LogFile +_levels = set() # combined for all log files + + +def _update_levels(): + global _levels + _levels = frozenset(level for file in _files.values() for level in file.levels) + + +class LogFile(object): + def __init__(self, filename, file, levels=LEVELS): + info("Also logging to {0!j}.", filename) + + self.filename = filename + self.file = file + self._levels = frozenset(levels) + + with _lock: + _files[self.filename] = self + _update_levels() + info( + "{0} {1}\n{2} {3} ({4}-bit)\nptvsd {5}", + platform.platform(), + platform.machine(), + platform.python_implementation(), + platform.python_version(), + 64 if sys.maxsize > 2 ** 32 else 32, + ptvsd.__version__, + _to_files=[self], + ) -file = None -"""If not None, which file to log to. + @property + def levels(self): + return self._levels -This can be automatically set by to_file(). -""" + @levels.setter + def levels(self, value): + with _lock: + self._levels = value + _update_levels() -timestamp_format = "09.3f" -"""Format spec used for timestamps. Can be changed to dial precision up or down. -""" + def write(self, level, output): + if level in self.levels: + try: + self.file.write(output) + self.file.flush() + except Exception: + pass -_lock = threading.Lock() -_tls = threading.local() -_filename = None + def close(self): + with _lock: + del _files[self.filename] + _update_levels() + info("Not logging to {0!j} anymore.", self.filename) + + try: + self.file.close() + except Exception: + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +class LogStdErr(LogFile): + def __init__(self): + super(LogStdErr, self).__init__("", sys.stderr) + + @property + def levels(self): + return stderr_levels + + +class NoLog(object): + file = filename = None + + __bool__ = __nonzero__ = lambda self: False + + def close(self): + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + pass # Used to inject a newline into stderr if logging there, to clean up the output # when it's intermixed with regular prints from other sources. def newline(level="info"): with _lock: - if level in stderr_levels: - try: - stderr.write("\n") - except Exception: - pass + _stderr.write("\n") -def write(level, text): +def write(level, text, _to_files=all): assert level in LEVELS t = timestamp.current() @@ -75,18 +143,10 @@ def write(level, text): output = prefix + output + "\n\n" with _lock: - if level in stderr_levels: - try: - stderr.write(output) - except Exception: - pass - - if file and level in file_levels: - try: - file.write(output) - file.flush() - except Exception: - pass + if _to_files is all: + _to_files = _files.values() + for file in _to_files: + file.write(level, output) return text @@ -94,16 +154,16 @@ def write(level, text): def write_format(level, format_string, *args, **kwargs): # Don't spend cycles doing expensive formatting if we don't have to. Errors are # always formatted, so that error() can return the text even if it's not logged. 
- if level != "error": - if not (level in stderr_levels or (file and level in file_levels)): - return + if level != "error" and level not in _levels: + return try: text = fmt(format_string, *args, **kwargs) except Exception: exception() raise - return write(level, text) + + return write(level, text, kwargs.pop("_to_files", all)) debug = functools.partial(write_format, "debug") @@ -126,11 +186,6 @@ def error(*args, **kwargs): return AssertionError(write_format("error", *args, **kwargs)) -def stack(title="Stack trace"): - stack = "\n".join(traceback.format_stack()) - debug("{0}:\n\n{1}", title, stack) - - def exception(format_string="", *args, **kwargs): """Logs an exception with full traceback. @@ -174,39 +229,46 @@ def exception(format_string="", *args, **kwargs): return exc_info[1] -def to_file(filename=None): - global file, _filename +def to_file(filename=None, prefix=None, levels=LEVELS): + """Starts logging all messages at the specified levels to the designated file. - # TODO: warn when options.log_dir is unset, after fixing improper use in ptvsd.server - if file is not None or options.log_dir is None: - return + Either filename or prefix must be specified, but not both. - _filename = _filename or filename - if _filename is None: - if options.log_dir is None: - warning( - "ptvsd.to_file() cannot generate log file name - ptvsd.options.log_dir is not set" - ) - return - _filename = fmt( - "{0}/{1}-{2}.log", options.log_dir, filename_prefix, os.getpid() - ) - - file = io.open(_filename, "w", encoding="utf-8") - info( - "{0} {1}\n{2} {3} ({4}-bit)\nptvsd {5}", - platform.platform(), - platform.machine(), - platform.python_implementation(), - platform.python_version(), - 64 if sys.maxsize > 2 ** 32 else 32, - ptvsd.__version__, - ) - return _filename + If filename is specified, it designates the log file directly. + + If prefix is specified, the log file is automatically created in options.log_dir, + with filename computed as prefix + os.getpid(). If log_dir is None, no log file + is created, and the function returns immediately. + + If the file with the specified or computed name is already being used as a log + file, it is not overwritten, but its levels are updated as specified. + The function returns an object with a close() method. When the object is closed, + logs are not written into that file anymore. Alternatively, the returned object + can be used in a with-statement: -def filename(): - return _filename + with log.to_file("some.log"): + # now also logging to some.log + # not logging to some.log anymore + """ + + assert (filename is not None) ^ (prefix is not None) + + if filename is None: + if options.log_dir is None: + return NoLog() + try: + os.makedirs(options.log_dir) + except OSError: + pass + filename = fmt("{0}/{1}-{2}.log", options.log_dir, prefix, os.getpid()) + + file = _files.get(filename) + if file is None: + file = LogFile(filename, io.open(filename, "w", encoding="utf-8"), levels) + else: + file.levels = levels + return file @contextlib.contextmanager @@ -290,6 +352,9 @@ def report_paths(get_paths, label=None): info("{0}", result) +_stderr = LogStdErr() + + # The following are helper shortcuts for printf debugging. They must never be used # in production code. 
@@ -303,3 +368,8 @@ def _vars(*names): if names: locals = {name: locals[name] for name in names if name in locals} warning("$VARS {0!r}", locals) + + +def _stack(): + stack = "\n".join(traceback.format_stack()) + warning("$STACK:\n\n{0}", stack) diff --git a/src/ptvsd/common/util.py b/src/ptvsd/common/util.py index 97ac3c054..3c0173cfa 100644 --- a/src/ptvsd/common/util.py +++ b/src/ptvsd/common/util.py @@ -4,8 +4,11 @@ from __future__ import absolute_import, print_function, unicode_literals +import os import sys +from ptvsd.common import compat + def evaluate(code, path=__file__, mode="eval"): # Setting file path here to avoid breaking here if users have set @@ -16,6 +19,17 @@ def evaluate(code, path=__file__, mode="eval"): return eval(expr, {}, sys.modules) +def prepend_path_entry(env, key, entry): + """Prepends a new entry to a PATH-style environment variable named by key in + env, creating that variable if it doesn't exist already. + """ + try: + tail = os.path.pathsep + env[key] + except KeyError: + tail = "" + env[key] = entry + tail + + class Observable(object): """An object with change notifications.""" @@ -28,3 +42,36 @@ def __setattr__(self, name, value): finally: for ob in self.observers: ob(self, name) + + +class Env(dict): + """A dict for environment variables. + """ + + @staticmethod + def snapshot(): + """Returns a snapshot of the current environment. + """ + return Env(os.environ) + + def copy(self, updated_from=None): + result = Env(self) + if updated_from is not None: + result.update(updated_from) + return result + + def prepend_to(self, key, entry): + """Prepends a new entry to a PATH-style environment variable, creating + it if it doesn't exist already. + """ + try: + tail = os.path.pathsep + self[key] + except KeyError: + tail = "" + self[key] = entry + tail + + def for_popen(self): + """Returns a copy of this dict, with all strings converted to the type + suitable for subprocess.Popen() and other similar APIs. + """ + return {compat.filename_str(k): compat.filename_str(v) for k, v in self.items()} diff --git a/src/ptvsd/launcher/__main__.py b/src/ptvsd/launcher/__main__.py index b5958873f..d088d13fd 100644 --- a/src/ptvsd/launcher/__main__.py +++ b/src/ptvsd/launcher/__main__.py @@ -19,10 +19,9 @@ def main(): from ptvsd.common import log - from ptvsd.launcher import adapter + from ptvsd.launcher import adapter, debuggee - log.filename_prefix = "ptvsd.launcher" - log.to_file() + log.to_file(prefix="ptvsd.launcher") log.describe_environment("ptvsd.launcher startup environment:") def option(name, type, *args): @@ -37,6 +36,9 @@ def option(name, type, *args): adapter.connect(session_id, launcher_port) adapter.channel.wait() + if debuggee.process is not None: + sys.exit(debuggee.process.returncode) + if __name__ == "__main__": # ptvsd can also be invoked directly rather than via -m. In this case, the first diff --git a/src/ptvsd/launcher/adapter.py b/src/ptvsd/launcher/adapter.py index ca87832c5..1a67f0d31 100644 --- a/src/ptvsd/launcher/adapter.py +++ b/src/ptvsd/launcher/adapter.py @@ -113,7 +113,7 @@ def property_or_debug_option(prop_name, flag_name): if cwd == (): # If it's not specified, but we're launching a file rather than a module, # and the specified path has a directory in it, use that. 
- cwd = None if program == () else (os.path.dirname(program) or None) + cwd = None if program == () else (os.path.dirname(program[0]) or None) env = os.environ.copy() if "PTVSD_TEST" in env: @@ -122,6 +122,9 @@ def property_or_debug_option(prop_name, flag_name): env.pop("COV_CORE_SOURCE", None) env.update(request("env", json.object(unicode))) + if request("gevent", False): + env["GEVENT_SUPPORT"] = "True" + redirect_output = "RedirectOutput" in debug_options if redirect_output: # sys.stdout buffering must be disabled - otherwise we won't see the output diff --git a/src/ptvsd/server/api.py b/src/ptvsd/server/api.py index e5b82b57d..3749928a9 100644 --- a/src/ptvsd/server/api.py +++ b/src/ptvsd/server/api.py @@ -5,6 +5,7 @@ from __future__ import absolute_import, print_function, unicode_literals import contextlib +import json import os import pydevd import sys @@ -39,7 +40,8 @@ def debug(address, log_dir=None, multiprocess=True): if log_dir: common_opts.log_dir = log_dir - log.to_file() + log.to_file(prefix="ptvsd.server") + log.describe_environment("ptvsd.server debug start environment:") log.info("{0}{1!r}", func.__name__, (address, log_dir, multiprocess)) if is_attached(): @@ -76,7 +78,7 @@ def debug(address, log_dir=None, multiprocess=True): @_starts_debugging def enable_attach(dont_trace_start_patterns, dont_trace_end_patterns): if hasattr(enable_attach, "called"): - raise RuntimeError("'enable_attach' can only be called once per process.") + raise RuntimeError("enable_attach() can only be called once per process.") host, port = pydevd._enable_attach( ("127.0.0.1", 0), @@ -87,7 +89,6 @@ def enable_attach(dont_trace_start_patterns, dont_trace_end_patterns): log.info("pydevd debug server running at: {0}:{1}", host, port) - port_queue = queue.Queue() class _DAPMessagesListener(pydevd.IDAPMessagesListener): def before_send(self, msg): pass @@ -99,10 +100,12 @@ def after_receive(self, msg): except KeyError: pass + port_queue = queue.Queue() pydevd.add_dap_messages_listener(_DAPMessagesListener()) with pydevd.skip_subprocess_arg_patch(): import subprocess + adapter_args = [ sys.executable, os.path.join(os.path.dirname(ptvsd.__file__), "adapter"), @@ -111,15 +114,13 @@ def after_receive(self, msg): "--port", str(server_opts.port), "--for-server-on-port", - str(port) + str(port), ] if common_opts.log_dir is not None: adapter_args += ["--log-dir", common_opts.log_dir] - log.info( - "enable_attach() spawning attach-to-PID debugger injector: {0!r}", adapter_args - ) + log.info("enable_attach() spawning adapter: {0!r}", adapter_args) # Adapter life time is expected to be longer than this process, # so never wait on the adapter process @@ -130,8 +131,15 @@ def after_receive(self, msg): server_opts.port = port_queue.get(True, _QUEUE_TIMEOUT) + listener_file = os.getenv("PTVSD_LISTENER_FILE") + if listener_file is not None: + with open(listener_file, "w") as f: + json.dump({"host": server_opts.host, "port": server_opts.port}, f) + enable_attach.called = True - log.info("ptvsd debug server running at: {0}:{1}", server_opts.host, server_opts.port) + log.info( + "ptvsd debug server running at: {0}:{1}", server_opts.host, server_opts.port + ) return server_opts.host, server_opts.port @@ -163,8 +171,7 @@ def break_into_debugger(): stop_at_frame = sys._getframe().f_back while ( stop_at_frame is not None - and global_debugger.get_file_type(stop_at_frame) - == global_debugger.PYDEV_FILE + and global_debugger.get_file_type(stop_at_frame) == global_debugger.PYDEV_FILE ): stop_at_frame = stop_at_frame.f_back @@ 
-206,10 +213,13 @@ def tracing(should_trace): tid = threading.current_thread().ident if pydb is None: log.info("ptvsd.tracing() ignored on thread {0} - debugger not attached", tid) + def enable_or_disable(_): # Always fetch the fresh value, in case it changes before we restore. _tls.is_tracing = get_global_debugger() is not None + else: + def enable_or_disable(enable): if enable: log.info("Enabling tracing on thread {0}", tid) @@ -234,4 +244,4 @@ def restore_tracing(): enable_or_disable(was_tracing) enable_or_disable(should_trace) - return restore_tracing() \ No newline at end of file + return restore_tracing() diff --git a/src/ptvsd/server/attach_pid_injected.py b/src/ptvsd/server/attach_pid_injected.py index 11318160e..162f2a82b 100644 --- a/src/ptvsd/server/attach_pid_injected.py +++ b/src/ptvsd/server/attach_pid_injected.py @@ -57,21 +57,20 @@ def on_critical(msg): from ptvsd.common import options as common_opts from ptvsd.server import options - common_opts.log_dir = log_dir + if log_dir is not None: + common_opts.log_dir = log_dir options.client = client options.host = host options.port = port - from ptvsd.common import log - log.to_file() - log.info("Debugger injection begin") - if options.client: ptvsd.attach((options.host, options.port)) else: ptvsd.enable_attach((options.host, options.port)) + from ptvsd.common import log log.info("Debugger successfully injected") + except: import traceback traceback.print_exc() diff --git a/src/ptvsd/server/cli.py b/src/ptvsd/server/cli.py index fb17ee0a0..d072a2e59 100644 --- a/src/ptvsd/server/cli.py +++ b/src/ptvsd/server/cli.py @@ -213,6 +213,7 @@ def run_module(): # actually invoking it. On Python 3, it's exposed as a public API, but # on Python 2, we have to invoke a private function in runpy for this. # Either way, if it fails to resolve for any reason, just leave argv as is. + argv_0 = sys.argv[0] try: if sys.version_info >= (3,): from importlib.util import find_spec @@ -343,7 +344,7 @@ def main(): print(HELP + "\nError: " + str(ex), file=sys.stderr) sys.exit(2) - log.to_file() + log.to_file(prefix="ptvsd.server") log.describe_environment("ptvsd.server startup environment:") log.info( "sys.argv before parsing: {0!r}\n" " after parsing: {1!r}", diff --git a/tests/DEBUGGEE_PYTHONPATH/debug_me/__init__.py b/tests/DEBUGGEE_PYTHONPATH/debug_me/__init__.py index 253a08213..8282f5759 100644 --- a/tests/DEBUGGEE_PYTHONPATH/debug_me/__init__.py +++ b/tests/DEBUGGEE_PYTHONPATH/debug_me/__init__.py @@ -20,48 +20,31 @@ __all__ = ["ptvsd", "pydevd", "session_id"] -import imp import os -import sys - -# For `from debug_me import ...`. -import ptvsd -import ptvsd.server -import pydevd # Used by backchannel. -session_id = int(os.getenv("PTVSD_SESSION_ID")) -name = "ptvsd-" + str(session_id) - - -# For all start methods except for "attach_socket_import", DebugSession itself -# will take care of starting the debuggee process correctly. -# -# For "attach_socket_import", DebugSession will supply the code that needs to -# be executed in the debuggee to enable debugging and establish connection back -# to DebugSession - the debuggee simply needs to execute it as is. -_code = os.getenv("PTVSD_DEBUG_ME") -if _code: - # Remove it, so that subprocesses don't try to manually configure ptvsd on the - # same port. In multiprocess scenarios, subprocesses are supposed to load ptvsd - # via code that is automatically injected into the subprocess by its parent. 
- del os.environ["PTVSD_DEBUG_ME"] - - _code = compile(_code, "", "exec") +session_id = int(os.getenv("PTVSD_TEST_SESSION_ID")) +name = "Debuggee-" + str(session_id) - # On Python 2, imports use a global import lock, which deadlocks enable_attach() - # when it tries to import from a background thread. This works around that. - if sys.version_info < (3,): - imp.release_lock() - try: - eval(_code, {}) - finally: - if sys.version_info < (3,): - imp.acquire_lock() # For non-blocking communication between the test and the debuggee. The debuggee # can access this as a normal dict - scratchpad["foo"] etc. The test should assign # to session.scratchpad[...], which will automatically perform "evaluate" requests # as needed to assign the value. scratchpad = {} + + +# Some runners require code to be executed in the debuggee process, either to set up +# the debug server, or to ensure that it doesn't run any other code until the debugger +# is attached. This provides a facility to inject such code. +_code = os.environ.pop("PTVSD_TEST_DEBUG_ME", None) +if _code: + _code = compile(_code, "", "exec") + eval(_code, {}) + + +# For `from debug_me import ...`. +import ptvsd +import ptvsd.server +import pydevd diff --git a/tests/DEBUGGEE_PYTHONPATH/debug_me/backchannel.py b/tests/DEBUGGEE_PYTHONPATH/debug_me/backchannel.py index 5734244fe..e4c8c678c 100644 --- a/tests/DEBUGGEE_PYTHONPATH/debug_me/backchannel.py +++ b/tests/DEBUGGEE_PYTHONPATH/debug_me/backchannel.py @@ -26,15 +26,6 @@ def receive(): return _stream.read_json() -def wait_for(expected): - actual = receive() - assert expected == actual, fmt( - "Debuggee expected {0!r} on backchannel, but got {1!r} from the test", - expected, - actual, - ) - - def close(): global _socket, _stream if _socket is None: @@ -64,14 +55,11 @@ def _error(*_): name = fmt("backchannel-{0}", debug_me.session_id) -port = os.getenv("PTVSD_BACKCHANNEL_PORT") +port = os.environ.pop("PTVSD_TEST_BACKCHANNEL_PORT", None) if port is not None: port = int(port) log.info("Connecting {0} to port {1}...", name, port) - # Remove it, so that subprocesses don't try to use the same backchannel. - del os.environ["PTVSD_BACKCHANNEL_PORT"] - _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: _socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) @@ -80,5 +68,7 @@ def _error(*_): _socket.close() raise else: - _stream = messaging.JsonIOStream.from_socket(_socket, name="backchannel") # noqa + _stream = messaging.JsonIOStream.from_socket( # noqa + _socket, name="backchannel" + ) atexit.register(close) diff --git a/tests/__init__.py b/tests/__init__.py index 9f715de7f..660ac1cba 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -10,7 +10,6 @@ import pkgutil import pytest import py.path -import sys # Do not import anything from ptvsd until assert rewriting is enabled below! @@ -55,11 +54,9 @@ def _register_assert_rewrite(modname): # Enable full logging to stderr, and make timestamps shorter to match maximum test # run time better. -log.stderr = sys.stderr # use pytest-captured stderr rather than __stderr__ log.stderr_levels = set(log.LEVELS) log.timestamp_format = "06.3f" -log.filename_prefix = "tests" -log.to_file() +log.to_file(prefix="tests") # Enable JSON serialization for py.path.local. 
diff --git a/tests/_logs/clean.cmd b/tests/_logs/clean.cmd new file mode 100644 index 000000000..8e3bc1e1b --- /dev/null +++ b/tests/_logs/clean.cmd @@ -0,0 +1 @@ +@for /d %%i in (%~dp0\test_*) do rd /s /q "%%i" diff --git a/tests/debug/__init__.py b/tests/debug/__init__.py index 3a0f2e2bd..5e5995d6d 100644 --- a/tests/debug/__init__.py +++ b/tests/debug/__init__.py @@ -4,18 +4,9 @@ from __future__ import absolute_import, division, print_function, unicode_literals -import py - -import ptvsd - -PTVSD_DIR = py.path.local(ptvsd.__file__) / ".." -PTVSD_ADAPTER_DIR = PTVSD_DIR / "adapter" - -# Added to the environment variables of all adapters and servers. -PTVSD_ENV = {"PYTHONUNBUFFERED": "1"} - # Expose Session directly. def Session(*args, **kwargs): from tests.debug import session + return session.Session(*args, **kwargs) diff --git a/tests/debug/comms.py b/tests/debug/comms.py index d70e3d7e4..1ffe7e4dd 100644 --- a/tests/debug/comms.py +++ b/tests/debug/comms.py @@ -24,7 +24,7 @@ def __init__(self, session): self._server_socket = None def __str__(self): - return fmt("backchannel-{0}", self.session.id) + return fmt("{0}.backchannel", self.session.debuggee_id) def listen(self): self._server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) @@ -70,14 +70,6 @@ def send(self, value): self._stream.write_json(value) return t - def expect(self, expected): - actual = self.receive() - assert expected == actual, fmt( - "Test expected {0!r} on backchannel, but got {1!r} from the debuggee", - expected, - actual, - ) - def close(self): if self._socket: log.debug("Closing {0} socket of {1}...", self, self.session) diff --git a/tests/debug/config.py b/tests/debug/config.py new file mode 100644 index 000000000..cd6945414 --- /dev/null +++ b/tests/debug/config.py @@ -0,0 +1,222 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from __future__ import absolute_import, division, print_function, unicode_literals + +import collections +import os + + +class DebugConfig(collections.MutableMapping): + """Debug configuration for a session. Corresponds to bodies of DAP "launch" and + "attach" requests, or launch.json in VSCode. + + It is a dict-like object that only allows keys that are valid debug configuration + properties for ptvsd. When a property is queried, but it's not explicitly set in + the config, the default value (i.e. what ptvsd will assume the property is set to) + is returned. + + In addition, it exposes high-level wrappers over "env" and "debugOptions". + """ + + # Valid configuration properties. Keys are names, and values are defaults that + # are assumed by the adapter and/or the server if the property is not specified. + # If the property is required, or if the default is computed in such a way that + # it cannot be predicted, the value is (). 
+ PROPERTIES = { + # Common + "breakOnSystemExitZero": False, + "debugOptions": [], + "django": False, + "jinja": False, + "justMyCode": False, + "flask": False, + "logToFile": False, + "maxExceptionStackFrames": (), + "name": (), + "noDebug": False, + "pathMappings": [], + "pyramid": False, + "pythonPath": (), + "redirectOutput": False, + "rules": [], + "showReturnValue": True, + "steppingResumesAllThreads": True, + "subProcess": False, + "successExitCodes": [0], + "type": (), + # Launch + "args": [], + "code": (), + "console": "internal", + "cwd": (), + "env": {}, + "gevent": False, + "internalConsoleOptions": "neverOpen", + "module": (), + "program": (), + "stopOnEntry": False, + "sudo": False, + "waitOnNormalExit": False, + "waitOnAbnormalExit": False, + # Attach by socket + "host": (), + "port": (), + # Attach by PID + "processId": (), + } + + def __init__(self, *args, **kwargs): + self._dict = dict(*args, **kwargs) + self.env = self.Env(self) + self.debug_options = self.DebugOptions(self) + + def __iter__(self): + return iter(self._dict) + + def __len__(self): + return len(self._dict) + + def __contains__(self, key): + return key in self._dict + + def __getitem__(self, key): + try: + return self._dict[key] + except KeyError: + try: + value = self.PROPERTIES[key] + except KeyError: + pass + else: + if value != (): + return value + raise + + def __delitem__(self, key): + del self._dict[key] + + def __setitem__(self, key, value): + assert key in self.PROPERTIES + self._dict[key] = value + + def __getstate__(self): + return dict(self) + + def setdefault(self, key, default=None): + if key not in self: + self[key] = default + return self[key] + + def setdefaults(self, defaults): + """Like setdefault(), but sets multiple default values at once. + """ + for k, v in defaults.items(): + self.setdefault(k, v) + + def normalize(self): + """Normalizes the debug configuration by adding any derived properties, in + the manner similar to what VSCode does to launch.json before submitting it + to the adapter. + """ + + if self["showReturnValue"]: + self.debug_options.add("ShowReturnValue") + + if self["redirectOutput"]: + self.debug_options.add("RedirectOutput") + + if not self["justMyCode"]: + self.debug_options.add("DebugStdLib") + + if self["django"]: + self.debug_options.add("Django") + + if self["jinja"]: + self.debug_options.add("Jinja") + + if self["flask"]: + self.debug_options.add("Flask") + + if self["pyramid"]: + self.debug_options.add("Pyramid") + + if self["subProcess"]: + self.debug_options.add("Multiprocess") + + if self["breakOnSystemExitZero"]: + self.debug_options.add("BreakOnSystemExitZero") + + if self["stopOnEntry"]: + self.debug_options.add("StopOnEntry") + + if self["waitOnNormalExit"]: + self.debug_options.add("WaitOnNormalExit") + + if self["waitOnAbnormalExit"]: + self.debug_options.add("WaitOnAbnormalExit") + + class Env(collections.MutableMapping): + """Wraps config["env"], automatically creating and destroying it as needed. 
+ """ + + def __init__(self, config): + self.config = config + + def __iter__(self): + return iter(self.config["env"]) + + def __len__(self): + return len(self.config["env"]) + + def __getitem__(self, key): + return self.config["env"][key] + + def __delitem__(self, key): + env = self.config.get("env", {}) + del env[key] + if not len(env): + del self.config["env"] + + def __setitem__(self, key, value): + self.config.setdefault("env", {})[key] = value + + def __getstate__(self): + return dict(self) + + def prepend_to(self, key, entry): + """Prepends a new entry to a PATH-style environment variable, creating + it if it doesn't exist already. + """ + try: + tail = os.path.pathsep + self[key] + except KeyError: + tail = "" + self[key] = entry + tail + + class DebugOptions(collections.MutableSet): + """Wraps config["debugOptions"], automatically creating and destroying it as + needed, and providing set operations for it. + """ + + def __init__(self, config): + self.config = config + + def __iter__(self): + return iter(self.config["debugOptions"]) + + def __len__(self): + return len(self.config["env"]) + + def __contains__(self, key): + return key in self.config["debugOptions"] + + def add(self, key): + opts = self.config.setdefault("debugOptions", []) + if key not in opts: + opts.append(key) + + def discard(self, key): + opts = self.config.get("debugOptions", []) + opts[:] = [x for x in opts if x != key] diff --git a/tests/debug/output.py b/tests/debug/output.py index d934c1176..cb00b0134 100644 --- a/tests/debug/output.py +++ b/tests/debug/output.py @@ -9,7 +9,7 @@ from ptvsd.common import fmt, log -class CaptureOutput(object): +class CapturedOutput(object): """Captures stdout and stderr of the debugged process. """ @@ -19,8 +19,14 @@ def __init__(self, session): self._chunks = {} self._worker_threads = [] + assert not len(session.captured_output - {"stdout", "stderr"}) + for stream_name in session.captured_output: + log.info("Capturing {0} {1}", session.debuggee_id, stream_name) + stream = getattr(session.debuggee, stream_name) + self._capture(stream, stream_name) + def __str__(self): - return fmt("CaptureOutput({0})", self.session) + return fmt("CapturedOutput({0})", self.session) def _worker(self, pipe, name): chunks = self._chunks[name] @@ -32,7 +38,7 @@ def _worker(self, pipe, name): if not len(chunk): break - log.info("{0} {1}> {2!r}", self.session, name, chunk) + log.info("{0} {1}:\n{2!r}", self.session.debuggee_id, name, chunk) with self._lock: chunks.append(chunk) @@ -47,20 +53,12 @@ def _capture(self, pipe, name): thread.start() self._worker_threads.append(thread) - def capture(self, process): - """Start capturing stdout and stderr of the process. - """ - assert not self._worker_threads - log.info("Capturing {0} stdout and stderr", self.session) - self._capture(process.stdout, "stdout") - self._capture(process.stderr, "stderr") - def wait(self, timeout=None): """Wait for all remaining output to be captured. 
""" if not self._worker_threads: return - log.debug("Waiting for remaining {0} stdout and stderr...", self.session) + log.debug("Waiting for remaining {0} output...", self.session.debuggee_id) for t in self._worker_threads: t.join(timeout) self._worker_threads[:] = [] @@ -70,7 +68,7 @@ def _output(self, which, encoding, lines): result = self._chunks[which] except KeyError: raise AssertionError( - fmt("{0} was not captured for {1}", which, self.session) + fmt("{0} was not captured for {1}", which, self.session.debuggee_id) ) with self._lock: diff --git a/tests/debug/runners.py b/tests/debug/runners.py new file mode 100644 index 000000000..604dbec06 --- /dev/null +++ b/tests/debug/runners.py @@ -0,0 +1,242 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from __future__ import absolute_import, division, print_function, unicode_literals + +"""Runners are recipes for executing Targets in a debug.Session. + +Every function in this module that is decorated with @_runner must have at least two +positional arguments: (session, target) - and can have additional arguments. For every +such function, two artifacts are produced. + +The function is exposed directly as a method on Session, with the session argument +becoming self. + +The function is also exposed as a Runner object from this module. Runner objects are +callable, and invoke the wrapped function when called, but in addition, they can also +be bound to specific arguments, by using either [] or with_options(), which can be +chained arbitrarily:: + + # Direct invocation: + session.attach_by_socket("cli", log_dir="...") + + # Indirect invocation: + run = runners.attach_by_socket + run = run["cli"] + run = run.with_options(log_dir="...") + run(session, target) + +runner[x][y][z] is just a convenient shorthand for binding positional arguments, same +as runner.with_options(x, y, z). + +Runners are immutable, so every use of [] or with_options() creates a new runner with +the specified arguments bound. The runner must have all its required arguments bound +before it can be invoked. + +Regardless of whether the runner is invoked directly on the Session, or via a Runner +object, if the start DAP sequence involves a configuration phase (the "initialized" +event and the "configurationDone" request), the runner must be used in a with-statement. +The statements inside the with-statement are executed after receiving the "initialized" +event, and before sending the "configurationDone" request:: + + with run(session, target): + # DAP requests can be made to session, but target is not running yet. + session.set_breakpoints(...) + # target is running now! + +If there is no configuration phase, the runner returns directly:: + + session.config["noDebug"] = True + run(session, target) + # target is running now! 
+""" + +import os +import sys + +import ptvsd +from ptvsd.common import compat, fmt, log +from tests import net +from tests.debug import session + + +def _runner(f): + assert f.__name__.startswith("launch") or f.__name__.startswith("attach") + setattr(session.Session, f.__name__, f) + + class Runner(object): + request = "launch" if f.__name__.startswith("launch") else "attach" + + def __init__(self, *args, **kwargs): + self._args = tuple(args) + self._kwargs = dict(kwargs) + + def __getattr__(self, name): + return self._kwargs[name] + + def __call__(self, session, target, *args, **kwargs): + if len(args) or len(kwargs): + return self.with_options(*args, **kwargs)(session, target) + return f(session, target, *self._args, **self._kwargs) + + def __getitem__(self, arg): + return self.with_options(arg) + + def with_options(self, *args, **kwargs): + new_args = self._args + args + new_kwargs = dict(self._kwargs) + new_kwargs.update(kwargs) + return Runner(*new_args, **new_kwargs) + + def __repr__(self): + result = type(self).__name__ + args = [str(x) for x in self._args] + [ + fmt("{0}={1}", k, v) for k, v in self._kwargs.items() + ] + if len(args): + result += "(" + ", ".join(args) + ")" + return result + + @property + def pytest_id(self): + return repr(self) + + Runner.__name__ = f.__name__ + return Runner() + + +@_runner +def launch(session, target, console="integratedTerminal", cwd=None): + assert console in ("internalConsole", "integratedTerminal", "externalTerminal") + + log.info("Launching {0} in {1} using {2!j}.", target, session, console) + + target.configure(session) + config = session.config + config.setdefaults( + { + "console": "externalTerminal", + "internalConsoleOptions": "neverOpen", + "pythonPath": sys.executable, + } + ) + config["console"] = console + if cwd is not None: + config["cwd"] = cwd + + env = ( + session.spawn_adapter.env + if config["console"] == "internalConsole" + else config.env + ) + target.cli(env) + + session.spawn_adapter() + return session.request_launch() + + +def _attach_common_config(session, target, cwd): + assert target.code is None or "debug_me" in target.code, fmt( + "{0} must import debug_me.", target.filename + ) + + target.configure(session) + config = session.config + if cwd is not None: + config.setdefault("pathMappings", [{"localRoot": cwd, "remoteRoot": "."}]) + return config + + +@_runner +def attach_by_pid(session, target, cwd=None, wait=True): + log.info("Attaching {0} to {1} by PID.", session, target) + + config = session.config + try: + config["processId"] = int(target) + except TypeError: + pass + + if "processId" not in config: + _attach_common_config(session, target, cwd) + args = target.cli(session.spawn_debuggee.env) + + if wait: + debug_me = """ +import sys +while not "ptvsd" in sys.modules: pass +import ptvsd +while not ptvsd.is_attached(): pass + """ + else: + debug_me = None + + session.spawn_debuggee(args, cwd=cwd, debug_me=debug_me) + config["processId"] = session.debuggee.pid + + session.spawn_adapter() + return session.request_attach() + + +@_runner +def attach_by_socket( + session, target, method, listener="server", cwd=None, wait=True, log_dir=None +): + log.info( + "Attaching {0} to {1} by socket using {2}.", session, target, method.upper() + ) + + assert method in ("api", "cli") + assert listener in ("server") # TODO: ("adapter", "server") + + config = _attach_common_config(session, target, cwd) + + host = config["host"] = attach_by_socket.host + port = config["port"] = attach_by_socket.port + + if method == "cli": + args 
= [os.path.dirname(ptvsd.__file__)] + if wait: + args += ["--wait"] + args += ["--host", compat.filename_str(host), "--port", str(port)] + if not config["subProcess"]: + args += ["--no-subprocesses"] + if log_dir is not None: + args += ["--log-dir", log_dir] + debug_me = None + elif method == "api": + args = [] + debug_me = """ +import ptvsd +ptvsd.enable_attach(({host!r}, {port!r}), {args}) +if {wait!r}: + ptvsd.wait_for_attach() +""" + attach_args = "" if log_dir is None else fmt("log_dir={0!r}", log_dir) + debug_me = fmt(debug_me, host=host, port=port, wait=wait, args=attach_args) + else: + raise ValueError + args += target.cli(session.spawn_debuggee.env) + + session.spawn_debuggee(args, cwd=cwd, debug_me=debug_me) + if wait: + session.wait_for_enable_attach() + + session.connect_to_adapter((host, port)) + return session.request_attach() + + +attach_by_socket.host = "127.0.0.1" +attach_by_socket.port = net.get_test_server_port(5678, 5800) + + +all_launch = [ + launch["internalConsole"], + launch["integratedTerminal"], + launch["externalTerminal"], +] + +all_attach = [attach_by_socket["api"], attach_by_socket["cli"], attach_by_pid] + +all = all_launch + all_attach diff --git a/tests/debug/session.py b/tests/debug/session.py index 5d550bef6..8137f60f1 100644 --- a/tests/debug/session.py +++ b/tests/debug/session.py @@ -5,57 +5,169 @@ from __future__ import absolute_import, division, print_function, unicode_literals import collections +import contextlib import itertools import os import psutil +import py import subprocess import sys +import time -from ptvsd.common import compat, fmt, json, log, messaging +import ptvsd.adapter +from ptvsd.common import compat, fmt, json, log, messaging, options, sockets, util from ptvsd.common.compat import unicode import tests -from tests import code, debug, timeline, watchdog -from tests.debug import comms, output +from tests import code, timeline, watchdog +from tests.debug import comms, config, output from tests.patterns import some +DEBUGGEE_PYTHONPATH = tests.root / "DEBUGGEE_PYTHONPATH" + + StopInfo = collections.namedtuple( "StopInfo", ["body", "frames", "thread_id", "frame_id"] ) class Session(object): - counter = itertools.count(1) - - _ignore_unobserved = [ - timeline.Event("module"), - timeline.Event("continued"), - timeline.Event("exited"), - timeline.Event("terminated"), - timeline.Event("thread", some.dict.containing({"reason": "started"})), - timeline.Event("thread", some.dict.containing({"reason": "exited"})), - timeline.Event("output", some.dict.containing({"category": "stdout"})), - timeline.Event("output", some.dict.containing({"category": "stderr"})), - timeline.Event("output", some.dict.containing({"category": "console"})), - ] - - def __init__( - self, start_method, log_dir=None, client_id="vscode", backchannel=False - ): + """A test debug session. Manages the lifetime of the adapter and the debuggee + processes, captures debuggee stdio output, establishes a DAP message channel to + the debuggee, and records all DAP messages in that channel on a Timeline object. + + Must be used in a with-statement for proper cleanup. On successful exit - if no + exception escapes from the with-statement - the session will: + + 1. Invoke wait_for_exit(), unless expected_exit_code is None. + 2. Invoke disconnect(). + 3. Wait for the adapter process to exit. + 4. Finalize and closes the timeline + + If the exit is due to an exception, the session will: + + 1. Invoke disconnect(force=True). + 2. Kill the debuggee and the adapter processes. 
+ + Example:: + + with debug.Session() as session: + # Neither debuggee nor adapter are spawned yet. Initial configuration. + session.log_dir = ... + session.config.update({...}) + + with session.launch(...): + # Debuggee and adapter are spawned, but there is no code executing + # in the debuggee yet. + session.set_breakpoints(...) + + # Code is executing in the debuggee. + session.wait_for_stop(expected_frames=[...]) + assert session.get_variable(...) == ... + session.request_continue() + + # Session is disconnected from the debuggee, and both the debuggee and the + # adapter processes have exited. + assert session.exit_code == ... + """ + + tmpdir = None + """Temporary directory in which Sessions can create the temp files they need. + + Automatically set to tmpdir for the current test by pytest_fixtures.test_wrapper(). + """ + + _counter = itertools.count(1) + + def __init__(self): + assert Session.tmpdir is not None watchdog.start() - self.id = next(Session.counter) - self.log_dir = log_dir - self.start_method = start_method(self) - self.client_id = client_id - self.timeline = timeline.Timeline(str(self)) - self.ignore_unobserved.extend(self._ignore_unobserved) - self.ignore_unobserved.extend(self.start_method.ignore_unobserved) + self.id = next(Session._counter) + log.info("Starting {0}", self) + + self.client_id = "vscode" + + self.debuggee = None + """psutil.Popen instance for the debuggee process.""" + + self.adapter = None + """psutil.Popen instance for the adapter process.""" - self.adapter_process = None self.channel = None - self.backchannel = comms.BackChannel(self) if backchannel else None + """JsonMessageChannel to the adapter.""" + + self.captured_output = {"stdout", "stderr"} + """Before the debuggee is spawned, this is the set of stdio streams that + should be captured once it is spawned. + + After it is spawned, this is a CapturedOutput object capturing those streams. + """ + + self.backchannel = None + """The BackChannel object to talk to the debuggee. + + Must be explicitly created with open_backchannel(). + """ + self.scratchpad = comms.ScratchPad(self) + """The ScratchPad object to talk to the debuggee.""" + + self.start_request = None + """The "launch" or "attach" request that started executing code in this session. + """ + + self.expected_exit_code = 0 + """The expected exit code for the debuggee process. + + If None, the debuggee is not expected to exit when the Session is closed. + + If not None, this is validated against both exit_code and debuggee.returncode. + """ + + self.exit_code = None + """The actual exit code for the debuggee process, as received from DAP. + """ + + self.config = config.DebugConfig( + { + "justMyCode": True, + "name": "Test", + "redirectOutput": True, + "type": "python", + } + ) + """The debug configuration for this session.""" + + self.log_dir = ( + None + if options.log_dir is None + else py.path.local(options.log_dir) / str(self) + ) + """The log directory for this session. Passed via PTVSD_LOG_DIR to all spawned + child processes. + + If set to None, PTVSD_LOG_DIR is not automatically added, but tests can still + provide it manually. 
+ """ + + self.tmpdir = Session.tmpdir / str(self) + self.tmpdir.ensure(dir=True) + + self.timeline = timeline.Timeline(str(self)) + self.ignore_unobserved.extend( + [ + timeline.Event("module"), + timeline.Event("continued"), + # timeline.Event("exited"), + # timeline.Event("terminated"), + timeline.Event("thread", some.dict.containing({"reason": "started"})), + timeline.Event("thread", some.dict.containing({"reason": "exited"})), + timeline.Event("output", some.dict.containing({"category": "stdout"})), + timeline.Event("output", some.dict.containing({"category": "stderr"})), + timeline.Event("output", some.dict.containing({"category": "console"})), + ] + ) # Expose some common members of timeline directly - these should be the ones # that are the most straightforward to use, and are difficult to use incorrectly. @@ -70,27 +182,40 @@ def __init__( self.all_occurrences_of = self.timeline.all_occurrences_of self.observe_all = self.timeline.observe_all + spawn_adapter = self.spawn_adapter + self.spawn_adapter = lambda *args, **kwargs: spawn_adapter(*args, **kwargs) + self.spawn_adapter.env = util.Env() + + spawn_debuggee = self.spawn_debuggee + self.spawn_debuggee = lambda *args, **kwargs: spawn_debuggee(*args, **kwargs) + self.spawn_debuggee.env = util.Env() + def __str__(self): - return fmt("ptvsd-{0}", self.id) + return fmt("Session-{0}", self.id) @property def adapter_id(self): - return fmt("adapter-{0}", self.id) + return fmt("Adapter-{0}", self.id) @property def debuggee_id(self): - return fmt("debuggee-{0}", self.id) + return fmt("Debuggee-{0}", self.id) def __enter__(self): - self._start_adapter() - self._handshake() return self def __exit__(self, exc_type, exc_val, exc_tb): + if self.timeline.is_frozen: + self.timeline.unfreeze() + + # Only wait for exit if there was no exception in the test - if there was one, + # the debuggee might still be waiting for further requests. if exc_type is None: - # Only wait for debuggee if there was no exception in the test - if there - # was one, the debuggee might still be waiting for further requests. - self.start_method.wait_for_debuggee() + # If expected_exit_code is set to None, the debuggee is not expected to + # exit after this Session is closed (e.g. because another Session will + # attach to it later on). + if self.expected_exit_code is not None: + self.wait_for_exit() else: # Log the error, in case another one happens during shutdown. log.exception(exc_info=(exc_type, exc_val, exc_tb)) @@ -101,48 +226,170 @@ def __exit__(self, exc_type, exc_val, exc_tb): else: # If there was an exception, don't try to send any more messages to avoid # spamming log with irrelevant entries - just close the channel and kill - # the adapter process immediately. Don't close or finalize the timeline, - # either, since it'll have unobserved events in it. + # all the processes immediately. Don't close or finalize the timeline, + # either, since it'll likely have unobserved events in it. 
self.disconnect(force=True) - if self.adapter_process is not None: + if self.adapter is not None: + try: + self.adapter.kill() + except Exception: + pass + if self.debuggee is not None: try: - self.adapter_process.kill() + self.debuggee.kill() except Exception: pass - if self.adapter_process is not None: + if self.adapter is not None: log.info( "Waiting for {0} with PID={1} to exit.", self.adapter_id, - self.adapter_process.pid, + self.adapter.pid, ) - self.adapter_process.wait() - watchdog.unregister_spawn(self.adapter_process.pid, self.adapter_id) - self.adapter_process = None + self.adapter.wait() + watchdog.unregister_spawn(self.adapter.pid, self.adapter_id) + self.adapter = None - if self.backchannel: + if self.backchannel is not None: self.backchannel.close() self.backchannel = None - @property - def process(self): - return self.start_method.debuggee_process - - @property - def pid(self): - return self.process.pid - @property def ignore_unobserved(self): return self.timeline.ignore_unobserved - @property - def expected_exit_code(self): - return self.start_method.expected_exit_code + def open_backchannel(self): + assert self.backchannel is None + self.backchannel = comms.BackChannel(self) + self.backchannel.listen() + return self.backchannel - @expected_exit_code.setter - def expected_exit_code(self, value): - self.start_method.expected_exit_code = value + def _init_log_dir(self): + if self.log_dir is None: + return False + + log.info("Logs for {0} will be in {1!j}", self, self.log_dir) + try: + self.log_dir.remove() + except Exception: + pass + self.log_dir.ensure(dir=True) + + # Make subsequent calls of this method no-op for the remainder of the session. + self._init_log_dir = lambda: True + return True + + def _make_env(self, base_env, codecov=True): + env = util.Env.snapshot() + + if base_env is not None: + base_env = dict(base_env) + python_path = base_env.pop("PYTHONPATH", None) + if python_path is not None: + env.prepend_to("PYTHONPATH", python_path) + env.update(base_env) + + env["PTVSD_TEST_SESSION_ID"] = str(self.id) + env.prepend_to("PYTHONPATH", DEBUGGEE_PYTHONPATH.strpath) + + if self._init_log_dir(): + env.update( + { + "PTVSD_LOG_DIR": self.log_dir.strpath, + "PYDEVD_DEBUG": "True", + "PYDEVD_DEBUG_FILE": (self.log_dir / "pydevd.log").strpath, + } + ) + + if self.backchannel is not None: + env["PTVSD_TEST_BACKCHANNEL_PORT"] = str(self.backchannel.port) + + return env + + def spawn_debuggee(self, args, cwd=None, exe=sys.executable, debug_me=None): + assert self.debuggee is None + + args = [exe] + [ + compat.filename_str(s.strpath if isinstance(s, py.path.local) else s) + for s in args + ] + + env = self._make_env(self.spawn_debuggee.env, codecov=False) + env["PTVSD_LISTENER_FILE"] = self.listener_file = self.tmpdir / "listener" + if debug_me is not None: + env["PTVSD_TEST_DEBUG_ME"] = debug_me + + log.info( + "Spawning {0}:\n\n" + "Current directory: {1!j}\n\n" + "Command line: {2!j}\n\n" + "Environment variables: {3!j}\n\n", + self.debuggee_id, + cwd, + args, + env, + ) + self.debuggee = psutil.Popen( + args, + cwd=cwd, + env=env.for_popen(), + bufsize=0, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + log.info("Spawned {0} with PID={1}", self.debuggee_id, self.debuggee.pid) + watchdog.register_spawn(self.debuggee.pid, self.debuggee_id) + + if self.captured_output: + self.captured_output = output.CapturedOutput(self) + + def wait_for_enable_attach(self): + log.info( + "Waiting for debug server in {0} to open a listener socket...", + 
self.debuggee_id, + ) + while not self.listener_file.check(): + time.sleep(0.1) + + def spawn_adapter(self): + assert self.adapter is None + assert self.channel is None + + args = [sys.executable, os.path.dirname(ptvsd.adapter.__file__)] + env = self._make_env(self.spawn_adapter.env) + + log.info( + "Spawning {0}:\n\n" + "Command line: {1!j}\n\n" + "Environment variables: {2!j}\n\n", + self.adapter_id, + args, + env, + ) + self.adapter = psutil.Popen( + args, + bufsize=0, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + env=env.for_popen(), + ) + log.info("Spawned {0} with PID={1}", self.adapter_id, self.adapter.pid) + watchdog.register_spawn(self.adapter.pid, self.adapter_id) + + stream = messaging.JsonIOStream.from_process(self.adapter, name=self.adapter_id) + self._start_channel(stream) + + def connect_to_adapter(self, address): + assert self.channel is None + + host, port = address + log.info("Connecting to {0} at {1}:{2}", self.adapter_id, host, port) + sock = sockets.create_client() + sock.connect(address) + + stream = messaging.JsonIOStream.from_socket(sock, name=self.adapter_id) + self._start_channel(stream) def request(self, *args, **kwargs): freeze = kwargs.pop("freeze", True) @@ -159,6 +406,8 @@ def send_request(self, command, arguments=None, proceed=True): message = self.channel.send_request(command, arguments) request = self.timeline.record_request(message) + if command in ("launch", "attach"): + self.start_request = request # Register callback after recording the request, so that there's no race # between it being recorded, and the response to it being received. @@ -167,15 +416,33 @@ def send_request(self, command, arguments=None, proceed=True): return request def _process_event(self, event): - if event.event == "ptvsd_subprocess": + occ = self.timeline.record_event(event, block=False) + if event.event == "exited": + self.observe(occ) + self.exit_code = event("exitCode", int) + assert self.exit_code == self.expected_exit_code + elif event.event == "ptvsd_subprocess": + self.observe(occ) pid = event("processId", int) - watchdog.register_spawn(pid, fmt("{0}-subprocess-{1}", self, pid)) - self.timeline.record_event(event, block=False) + watchdog.register_spawn( + pid, fmt("{0}-subprocess-{1}", self.debuggee_id, pid) + ) def _process_request(self, request): self.timeline.record_request(request, block=False) if request.command == "runInTerminal": - return self.start_method.run_in_terminal(request) + args = request("args", json.array(unicode)) + cwd = request("cwd", ".") + env = request("env", json.object(unicode)) + try: + exe = args.pop(0) + assert not len(self.spawn_debuggee.env) + self.spawn_debuggee.env = env + self.spawn_debuggee(args, cwd, exe=exe) + return {} + except OSError as exc: + log.exception('"runInTerminal" failed:') + raise request.cant_handle(str(exc)) else: raise request.isnt_valid("not supported") @@ -197,28 +464,7 @@ def _process_response(self, request, response): def _process_disconnect(self): self.timeline.mark("disconnect", block=False) - def _start_adapter(self): - args = [sys.executable, debug.PTVSD_ADAPTER_DIR] - if self.log_dir is not None: - args += ["--log-dir", self.log_dir] - args = [compat.filename_str(s) for s in args] - - env = os.environ.copy() - env.update(debug.PTVSD_ENV) - env = { - compat.filename_str(k): compat.filename_str(v) for k, v in env.items() - } - - log.info("Spawning {0}: {1!j}", self.adapter_id, args) - self.adapter_process = psutil.Popen( - args, bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, env=env - ) - 
log.info("Spawned {0} with PID={1}", self.adapter_id, self.adapter_process.pid) - watchdog.register_spawn(self.adapter_process.pid, self.adapter_id) - - stream = messaging.JsonIOStream.from_process( - self.adapter_process, name=str(self) - ) + def _start_channel(self, stream): handlers = messaging.MessageHandlers( request=self._process_request, event=self._process_event, @@ -227,7 +473,6 @@ def _start_adapter(self): self.channel = messaging.JsonMessageChannel(stream, handlers) self.channel.start() - def _handshake(self): telemetry = self.wait_for_next_event("output") assert telemetry == { "category": "telemetry", @@ -235,57 +480,82 @@ def _handshake(self): "data": {"version": some.str}, } - self.send_request( + self.request( "initialize", { "pathFormat": "path", "clientID": self.client_id, - # "clientName":"Visual Studio Code", "adapterID": "test", "linesStartAt1": True, "columnsStartAt1": True, "supportsVariableType": True, "supportsRunInTerminalRequest": True, - # "supportsMemoryReferences":true, - # "supportsHandshakeRequest":true, - # "AdditionalProperties":{} }, - ).wait_for_response() + ) - def configure(self, run_as, target, env=None, **kwargs): - env = {} if env is None else dict(env) - env.update(debug.PTVSD_ENV) + def all_events(self, event, body=some.object): + return [ + occ.body + for occ in self.timeline.all_occurrences_of(timeline.Event(event, body)) + ] - pythonpath = env.get("PYTHONPATH", "") - if pythonpath: - pythonpath += os.pathsep - pythonpath += (tests.root / "DEBUGGEE_PYTHONPATH").strpath - pythonpath += os.pathsep + (debug.PTVSD_DIR / "..").strpath - env["PYTHONPATH"] = pythonpath + def output(self, category): + """Returns all output of a given category as a single string, assembled from + all the "output" events received for that category so far. + """ + events = self.all_events("output", some.dict.containing({"category": category})) + return "".join(event("output", unicode) for event in events) - env["PTVSD_SESSION_ID"] = str(self.id) + def _request_start(self, method): + self.config.normalize() + start_request = self.send_request(method, self.config) + + def wait_for_process_event(): + process = self.wait_for_next_event("process", freeze=False) + assert process == some.dict.containing( + { + "startMethod": self.start_request.command, + "name": some.str, + "isLocalProcess": True, + "systemProcessId": some.int, + } + ) - if self.backchannel is not None: - self.backchannel.listen() - env["PTVSD_BACKCHANNEL_PORT"] = str(self.backchannel.port) + # Depending on whether it's "noDebug" or not, we either get the "initialized" + # event, or an immediate response to our request. + self.timeline.wait_until_realized( + timeline.Event("initialized") | timeline.Response(start_request), + freeze=True, + ) - if self.log_dir is not None: - kwargs["logToFile"] = True + if start_request.response is not None: + # It was an immediate response - configuration is not possible. Just get + # the "process" event, and return to caller. + return wait_for_process_event() - self.captured_output = output.CaptureOutput(self) - self.start_method.configure(run_as, target, env=env, **kwargs) + # We got "initialized" - now we need to yield to the caller, so that it can + # configure the session before it starts running, and then give control back + # to us to finalize the configuration sequence. A nested context manager is + # used to ensure that all code up to this point executes eagerly. 
- def start_debugging(self): - start_request = self.start_method.start_debugging() - process = self.wait_for_next_event("process", freeze=False) - assert process == some.dict.containing( - { - "startMethod": start_request.command, - "name": some.str, - "isLocalProcess": True, - "systemProcessId": some.int, - } - ) + @contextlib.contextmanager + def configure(): + yield + self.request("configurationDone") + start_request.wait_for_response() + wait_for_process_event() + + return configure() + + def request_launch(self): + if "PYTHONPATH" in self.config.env: + # If specified, launcher will use it in lieu of PYTHONPATH it inherited + # from the adapter when spawning debuggee, so we need to adjust again. + self.config.env.prepend_to("PYTHONPATH", DEBUGGEE_PYTHONPATH.strpath) + return self._request_start("launch") + + def request_attach(self): + return self._request_start("attach") def request_continue(self): self.request("continue", freeze=False) @@ -383,6 +653,11 @@ def get_variable(self, varname, frame_id=None): """ return self.get_variables(varname, frame_id=frame_id)[0] + def wait_for_next_event(self, event, body=some.object, freeze=True): + return self.timeline.wait_for_next( + timeline.Event(event, body), freeze=freeze + ).body + def wait_for_stop( self, reason=some.str, @@ -423,29 +698,39 @@ def wait_for_stop( fid = frames[0]("id", int) return StopInfo(stopped, frames, tid, fid) - def wait_for_next_event(self, event, body=some.object, freeze=True): - return self.timeline.wait_for_next( - timeline.Event(event, body), freeze=freeze - ).body + def wait_for_next_subprocess(self): + raise NotImplementedError - def output(self, category): - """Returns all output of a given category as a single string, assembled from - all the "output" events received for that category so far. - """ - events = self.all_occurrences_of( - timeline.Event("output", some.dict.containing({"category": category})) - ) - return "".join(event("output", unicode) for event in events) + def wait_for_disconnect(self): + self.timeline.wait_until_realized(timeline.Mark("disconnect"), freeze=True) + + def wait_for_exit(self): + if self.debuggee is not None: + try: + self.debuggee.wait() + except Exception: + pass + finally: + watchdog.unregister_spawn(self.debuggee.pid, self.debuggee_id) + + self.timeline.wait_until_realized(timeline.Event("terminated")) + + # FIXME: "exited" event is not properly reported in attach scenarios at the + # moment, so the exit code is only checked if it's present. 
+ if self.start_request.command == "launch": + assert self.exit_code is not None + if self.debuggee is not None and self.exit_code is not None: + assert self.debuggee.returncode == self.exit_code + return self.exit_code def captured_stdout(self, encoding=None): + assert self.debuggee is not None return self.captured_output.stdout(encoding) def captured_stderr(self, encoding=None): + assert self.debuggee is not None return self.captured_output.stderr(encoding) - def wait_for_disconnect(self): - self.timeline.wait_for_next(timeline.Mark("disconnect")) - def disconnect(self, force=False): if self.channel is None: return @@ -453,6 +738,7 @@ def disconnect(self, force=False): try: if not force: self.request("disconnect") + self.timeline.wait_until_realized(timeline.Event("terminated")) except messaging.JsonIOError: pass finally: diff --git a/tests/debug/start_methods.py b/tests/debug/start_methods.py deleted file mode 100644 index 2074c02cb..000000000 --- a/tests/debug/start_methods.py +++ /dev/null @@ -1,577 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See LICENSE in the project root -# for license information. - -from __future__ import absolute_import, division, print_function, unicode_literals - - -import os -import ptvsd -import psutil -import py.path -import pytest -import subprocess -import sys -import time - -from ptvsd.common import compat, fmt, json, log -from ptvsd.common.compat import unicode -from tests import net, timeline, watchdog -from tests.patterns import some - - -PTVSD_DIR = py.path.local(ptvsd.__file__) / ".." -PTVSD_PORT = net.get_test_server_port(5678, 5800) - -# Code that is injected into the debuggee process when it does `import debug_me`, -# and start_method is attach_socket_* -PTVSD_DEBUG_ME = """ -import ptvsd -ptvsd.enable_attach(("127.0.0.1", {ptvsd_port}), log_dir={log_dir!r}) -ptvsd.wait_for_attach() -""" - - -class DebugStartBase(object): - ignore_unobserved = [] - - def __init__(self, session, method="base"): - self.session = session - self.method = method - self.debuggee_process = None - self.expected_exit_code = None - - def start_debugging(self, **kwargs): - pass - - def wait_for_debuggee(self): - # TODO: Exit should not be restricted to launch tests only - if "launch" in self.method: - exited = self.session.timeline.wait_until_realized( - timeline.Event("exited") - ).body - assert exited == some.dict.containing( - { - "exitCode": some.int - if self.expected_exit_code is None - else self.expected_exit_code - } - ) - - self.session.timeline.wait_until_realized(timeline.Event("terminated")) - - if self.debuggee_process is None: - return - - try: - self.debuggee_process.wait() - except Exception: - pass - finally: - watchdog.unregister_spawn( - self.debuggee_process.pid, self.session.debuggee_id - ) - - def run_in_terminal(self, request, **kwargs): - raise request.isnt_valid("not supported") - - def _build_common_args( - self, - args, - showReturnValue=None, - justMyCode=True, - subProcess=None, - django=None, - jinja=None, - flask=None, - pyramid=None, - logToFile=None, - redirectOutput=True, - noDebug=None, - maxExceptionStackFrames=None, - steppingResumesAllThreads=None, - rules=None, - successExitCodes=None, - breakOnSystemExitZero=None, - pathMappings=None, - ): - if logToFile: - args["logToFile"] = logToFile - if "env" in args: - args["env"]["PTVSD_LOG_DIR"] = self.session.log_dir - - if showReturnValue: - args["showReturnValue"] = showReturnValue - args["debugOptions"] += ["ShowReturnValue"] - - 
if redirectOutput: - args["redirectOutput"] = redirectOutput - args["debugOptions"] += ["RedirectOutput"] - - if justMyCode is False: - # default behavior is Just-my-code = true - args["justMyCode"] = justMyCode - args["debugOptions"] += ["DebugStdLib"] - - if django: - args["django"] = django - args["debugOptions"] += ["Django"] - - if jinja: - args["jinja"] = jinja - args["debugOptions"] += ["Jinja"] - - if flask: - args["flask"] = flask - args["debugOptions"] += ["Flask"] - - if pyramid: - args["pyramid"] = pyramid - args["debugOptions"] += ["Pyramid"] - - # VS Code uses noDebug in both attach and launch cases. Even though - # noDebug on attach does not make any sense. - if noDebug: - args["noDebug"] = True - - if subProcess: - args["subProcess"] = subProcess - args["debugOptions"] += ["Multiprocess"] - - if maxExceptionStackFrames: - args["maxExceptionStackFrames"] = maxExceptionStackFrames - - if steppingResumesAllThreads is not None: - args["steppingResumesAllThreads"] = steppingResumesAllThreads - - if rules is not None: - args["rules"] = rules - - if successExitCodes: - args["successExitCodes"] = successExitCodes - - if breakOnSystemExitZero: - args["debugOptions"] += ["BreakOnSystemExitZero"] - - if pathMappings is not None: - args["pathMappings"] = pathMappings - - def __str__(self): - return self.method - - -class Launch(DebugStartBase): - def __init__(self, session): - super(Launch, self).__init__(session, "launch") - self._launch_args = None - - def _build_launch_args( - self, - launch_args, - run_as, - target, - pythonPath=sys.executable, - args=(), - cwd=None, - env=None, - stopOnEntry=None, - gevent=None, - sudo=None, - waitOnNormalExit=None, - waitOnAbnormalExit=None, - console="externalTerminal", - internalConsoleOptions="neverOpen", - **kwargs - ): - assert console in ("internalConsole", "integratedTerminal", "externalTerminal") - env = {} if env is None else dict(env) - debug_options = [] - launch_args.update( - { - "name": "Terminal", - "type": "python", - "request": "launch", - "console": console, - "env": env, - "pythonPath": pythonPath, - "args": args, - "internalConsoleOptions": internalConsoleOptions, - "debugOptions": debug_options, - } - ) - - if stopOnEntry: - launch_args["stopOnEntry"] = stopOnEntry - debug_options += ["StopOnEntry"] - - if gevent: - launch_args["gevent"] = gevent - env["GEVENT_SUPPORT"] = "True" - - if sudo: - launch_args["sudo"] = sudo - - if waitOnNormalExit: - debug_options += ["WaitOnNormalExit"] - - if waitOnAbnormalExit: - debug_options += ["WaitOnAbnormalExit"] - - target_str = target - if isinstance(target, py.path.local): - target_str = target.strpath - - if cwd: - launch_args["cwd"] = cwd - elif os.path.isfile(target_str) or os.path.isdir(target_str): - launch_args["cwd"] = os.path.dirname(target_str) - else: - launch_args["cwd"] = os.getcwd() - - if "PYTHONPATH" not in env: - env["PYTHONPATH"] = "" - - if run_as == "program": - launch_args["program"] = target_str - elif run_as == "module": - if os.path.isfile(target_str) or os.path.isdir(target_str): - env["PYTHONPATH"] += os.pathsep + os.path.dirname(target_str) - try: - launch_args["module"] = target_str[ - (len(os.path.dirname(target_str)) + 1) : -3 - ] - except Exception: - launch_args["module"] = "code_to_debug" - else: - launch_args["module"] = target_str - elif run_as == "code": - with open(target_str, "rb") as f: - launch_args["code"] = f.read().decode("utf-8") - else: - pytest.fail() - - self._build_common_args(launch_args, **kwargs) - return launch_args - - def 
configure(self, run_as, target, **kwargs): - self._launch_args = self._build_launch_args({}, run_as, target, **kwargs) - self.no_debug = self._launch_args.get("noDebug", False) - - if not self.no_debug: - self._launch_request = self.session.send_request( - "launch", self._launch_args - ) - self.session.wait_for_next_event("initialized") - - def start_debugging(self): - if self.no_debug: - self._launch_request = self.session.send_request( - "launch", self._launch_args - ) - else: - self.session.request("configurationDone") - - self._launch_request.wait_for_response(freeze=False) - return self._launch_request - - def run_in_terminal(self, request): - args = request("args", json.array(unicode)) - cwd = request("cwd", ".") - - env = os.environ.copy() - env.pop("COV_CORE_SOURCE", None) # disable codecov subprocess hook - env.update(request("env", json.object(unicode))) - - if sys.version_info < (3,): - args = [compat.filename_str(s) for s in args] - env = { - compat.filename_str(k): compat.filename_str(v) for k, v in env.items() - } - - log.info( - '{0} spawning {1} via "runInTerminal" request', - self.session, - self.session.debuggee_id, - ) - self.debuggee_process = psutil.Popen( - args, - cwd=cwd, - env=env, - bufsize=0, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - watchdog.register_spawn(self.debuggee_process.pid, self.session.debuggee_id) - self.session.captured_output.capture(self.debuggee_process) - return {} - - -class AttachBase(DebugStartBase): - ignore_unobserved = DebugStartBase.ignore_unobserved + [] - - def __init__(self, session, method): - super(AttachBase, self).__init__(session, method) - self._attach_args = {} - - def _build_attach_args( - self, - attach_args, - run_as, - target, - host="127.0.0.1", - port=PTVSD_PORT, - **kwargs - ): - assert host is not None - assert port is not None - debug_options = [] - attach_args.update( - { - "name": "Attach", - "type": "python", - "request": "attach", - "debugOptions": debug_options, - } - ) - - attach_args["host"] = host - attach_args["port"] = port - - self._build_common_args(attach_args, **kwargs) - return attach_args - - def configure(self, run_as, target, **kwargs): - target_str = target - if isinstance(target, py.path.local): - target_str = target.strpath - - env = os.environ.copy() - env.pop("COV_CORE_SOURCE", None) # disable codecov subprocess hook - env.update(kwargs["env"]) - - cli_args = kwargs.get("cli_args") - if run_as == "program": - cli_args += [target_str] - elif run_as == "module": - if os.path.isfile(target_str) or os.path.isdir(target_str): - env["PYTHONPATH"] += os.pathsep + os.path.dirname(target_str) - try: - module = target_str[(len(os.path.dirname(target_str)) + 1) : -3] - except Exception: - module = "code_to_debug" - else: - module = target_str - cli_args += ["-m", module] - elif run_as == "code": - with open(target_str, "rb") as f: - cli_args += ["-c", f.read()] - else: - pytest.fail() - - cli_args += kwargs.get("args") - cli_args = [compat.filename_str(s) for s in cli_args] - env = {compat.filename_str(k): compat.filename_str(v) for k, v in env.items()} - - cwd = kwargs.get("cwd") - if cwd: - pass - elif os.path.isfile(target_str) or os.path.isdir(target_str): - cwd = os.path.dirname(target_str) - else: - cwd = os.getcwd() - - if "pathMappings" not in self._attach_args: - self._attach_args["pathMappings"] = [{"localRoot": cwd, "remoteRoot": "."}] - - env_str = "\n".join((fmt(" {0}={1}", k, env[k]) for k in sorted(env.keys()))) - log.info( - "Spawning {0}: {1!j}\n\n" 
"with cwd:\n {2!j}\n\n" "with env:\n{3}", - self.session.debuggee_id, - cli_args, - cwd, - env_str, - ) - - self.debuggee_process = psutil.Popen( - cli_args, - cwd=cwd, - env=env, - bufsize=0, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - watchdog.register_spawn(self.debuggee_process.pid, self.session.debuggee_id) - self.session.captured_output.capture(self.debuggee_process) - pid = self.debuggee_process.pid - - if self.method == "attach_pid": - self._attach_args["processId"] = pid - else: - log.info( - "Waiting for {0} to open listener socket...", self.session.debuggee_id - ) - for i in range(0, 100): - connections = psutil.net_connections() - if any(p == pid for (_, _, _, _, _, _, p) in connections): - break - time.sleep(0.1) - else: - log.warning("Couldn't detect open listener socket; proceeding anyway.") - - self._attach_request = self.session.send_request("attach", self._attach_args) - self.session.wait_for_next_event("initialized") - - def start_debugging(self): - self.session.request("configurationDone") - - self.no_debug = self._attach_args.get("noDebug", False) - if self.no_debug: - log.info('{0} ignoring "noDebug" in "attach"', self.session) - - self._attach_request.wait_for_response() - return self._attach_request - - -class AttachSocketImport(AttachBase): - def __init__(self, session): - super(AttachSocketImport, self).__init__(session, "attach_socket_import") - - def _check_ready_for_import(self, path_or_code): - if isinstance(path_or_code, py.path.local): - path_or_code = path_or_code.strpath - - if os.path.isfile(path_or_code): - with open(path_or_code, "rb") as f: - code = f.read() - elif "\n" in path_or_code: - code = path_or_code - else: - # path_or_code is a module name - return - assert b"debug_me" in code, fmt( - "{0} is started via {1}, but it doesn't import debug_me.", - path_or_code, - self.method, - ) - - def configure( - self, - run_as, - target, - pythonPath=sys.executable, - args=(), - cwd=None, - env=None, - **kwargs - ): - env = {} if env is None else dict(env) - self._attach_args = self._build_attach_args({}, run_as, target, **kwargs) - - ptvsd_port = self._attach_args["port"] - log_dir = ( - self.session.log_dir - if not self._attach_args.get("logToFile", False) - else None - ) - env["PTVSD_DEBUG_ME"] = fmt( - PTVSD_DEBUG_ME, ptvsd_port=ptvsd_port, log_dir=log_dir - ) - - self._check_ready_for_import(target) - - cli_args = [pythonPath] - super(AttachSocketImport, self).configure( - run_as, target, cwd=cwd, env=env, args=args, cli_args=cli_args, **kwargs - ) - - -class AttachSocketCmdLine(AttachBase): - def __init__(self, session): - super(AttachSocketCmdLine, self).__init__(session, "attach_socket_cmdline") - - def configure( - self, - run_as, - target, - pythonPath=sys.executable, - args=(), - cwd=None, - env=None, - **kwargs - ): - env = {} if env is None else dict(env) - self._attach_args = self._build_attach_args({}, run_as, target, **kwargs) - - cli_args = [pythonPath] - cli_args += [PTVSD_DIR.strpath] - cli_args += ["--wait"] - cli_args += [ - "--host", - self._attach_args["host"], - "--port", - str(self._attach_args["port"]), - ] - - log_dir = ( - self.session.log_dir if self._attach_args.get("logToFile", False) else None - ) - if log_dir: - cli_args += ["--log-dir", log_dir] - - if self._attach_args.get("subProcess", False): - cli_args += ["--multiprocess"] - - super(AttachSocketCmdLine, self).configure( - run_as, target, cwd=cwd, env=env, args=args, cli_args=cli_args, **kwargs - ) - - -class 
AttachProcessId(AttachBase): - def __init__(self, session): - super(AttachProcessId, self).__init__(session, "attach_pid") - - def configure( - self, - run_as, - target, - pythonPath=sys.executable, - args=(), - cwd=None, - env=None, - **kwargs - ): - env = {} if env is None else dict(env) - self._attach_args = self._build_attach_args({}, run_as, target, **kwargs) - - log_dir = ( - self.session.log_dir if self._attach_args.get("logToFile", False) else None - ) - if log_dir: - self._attach_args["ptvsdArgs"] = ["--log-dir", log_dir] - - cli_args = [pythonPath] - super(AttachProcessId, self).configure( - run_as, target, cwd=cwd, env=env, args=args, cli_args=cli_args, **kwargs - ) - - -class CustomServer(DebugStartBase): - def __init__(self, session): - super().__init__(session, "custom_server") - - -class CustomClient(DebugStartBase): - def __init__(self, session): - super().__init__(session, "custom_client") - - -__all__ = [ - Launch, # ptvsd --client ... foo.py - AttachSocketCmdLine, # ptvsd ... foo.py - AttachSocketImport, # python foo.py (foo.py must import debug_me) - AttachProcessId, # python foo.py && ptvsd ... --pid - CustomClient, # python foo.py (foo.py has to manually call ptvsd.attach) - CustomServer, # python foo.py (foo.py has to manually call ptvsd.enable_attach) -] diff --git a/tests/debug/targets.py b/tests/debug/targets.py new file mode 100644 index 000000000..e954aef77 --- /dev/null +++ b/tests/debug/targets.py @@ -0,0 +1,167 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE in the project root +# for license information. + +from __future__ import absolute_import, division, print_function, unicode_literals + +import py + +from ptvsd.common import fmt +from tests.patterns import some + + +class Target(object): + """Describes Python code that gets run by a Runner. + """ + + def __init__(self, filename, args=()): + if filename is not None and not isinstance(filename, py.path.local): + filename = py.path.local(filename) + + self.filename = filename + self.args = args + + if self.filename is None: + self.code = None + else: + with open(self.filename.strpath, "rb") as f: + self.code = f.read().decode("utf-8") + + def configure(self, session): + """Configures the session to execute this target. + + This should only modify session.config, but gets access to the entire session + to retrieve information about it. + """ + + raise NotImplementedError + + def cli(self, env): + """Provides the command line arguments, suitable for passing to python or + python -m ptvsd, to execute this target. + + Returns command line arguments as a list, e.g. ["-m", "module"]. + + If any environment variables are needed to properly interpret the command + line - e.g. PYTHONPATH - the implementation should send them in env. + """ + raise NotImplementedError + + @property + def co_filename(self): + """co_filename of code objects created at runtime from the source that this + Target describes, assuming no path mapping. + """ + assert ( + self.filename is not None + ), "co_filename requires Target created from filename" + return self.filename.strpath + + @property + def source(self): + """DAP "source" JSON for this Target.""" + return some.dap.source(py.path.local(self.co_filename)) + + @property + def lines(self): + """Same as self.filename.lines, if it is valid - e.g. for @pyfile objects. 
+ """ + assert ( + self.filename is not None + ), "lines() requires Target created from filename" + return self.filename.lines + + +class Program(Target): + """A Python script, executed directly: python foo.py + """ + + pytest_id = "program" + + def __repr__(self): + return fmt("program {0!j}", self.filename) + + def configure(self, session): + session.config["program"] = ( + [self.filename] + self.args if len(self.args) else self.filename + ) + + def cli(self, env): + return [self.filename.strpath] + list(self.args) + + +class Module(Target): + """A Python module, executed by name: python -m foo.bar + + If created from a filename, the module name is the name of the file, and the + Target will automatically add a PYTHONPATH entry. + """ + + pytest_id = "module" + + def __init__(self, filename=None, name=None, args=()): + assert (filename is None) ^ (name is None) + super(Module, self).__init__(filename, args) + self.name = name if name is not None else self.filename.purebasename + + def __repr__(self): + return fmt("module {0}", self.name) + + def configure(self, session): + session.config["module"] = ( + [self.name] + self.args if len(self.args) else self.name + ) + + def cli(self, env): + if self.filename is not None: + env.prepend_to("PYTHONPATH", self.filename.dirname) + return ["-m", self.name] + list(self.args) + + +class Code(Target): + """A snippet of Python code: python -c "print('foo')" + + If created from a filename, the code is the contents of the file. + """ + + pytest_id = "code" + + def __init__(self, filename=None, code=None, args=()): + assert (filename is None) ^ (code is None) + super(Code, self).__init__(filename, args) + if code is not None: + self.code = code + + def __repr__(self): + lines = self.code.split("\n") + return fmt("code: {0!j}", lines) + + def configure(self, session): + session.config["code"] = ( + [self.code] + self.args if len(self.args) else self.code + ) + + def cli(self, env): + return ["-c", self.code] + list(self.args) + + @property + def co_filename(self): + return "" + + @property + def source(self): + """DAP "source" JSON for this Target.""" + return some.dap.source("") + + +all_named = [Program, Module] +"""All targets that produce uniquely named code objects at runtime, and thus can +have breakpoints set in them. +""" + +all_unnamed = [Code] +"""All targets that produce unnamed code objects at runtime, and thus cannot have +breakpoints set in them. +""" + +all = all_named + all_unnamed diff --git a/tests/debug_old.py b/tests/debug_old.py deleted file mode 100644 index 7975bc4b4..000000000 --- a/tests/debug_old.py +++ /dev/null @@ -1,1306 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See LICENSE in the project root -# for license information. - -from __future__ import absolute_import, print_function, unicode_literals - -import collections -import itertools -import os -import platform -import psutil -import py.path -import pytest -import socket -import subprocess -import sys -import threading -import time - -import ptvsd -from ptvsd.common import compat, fmt, log, messaging -import tests -from tests import code, net, watchdog -from tests.patterns import some -from tests.timeline import Timeline, Event, Request, Response - -PTVSD_DIR = py.path.local(ptvsd.__file__) / ".." 
-PTVSD_ADAPTER_DIR = PTVSD_DIR / "adapter" -PTVSD_PORT = net.get_test_server_port(5678, 5800) - -# Added to the environment variables of every new debug.Session - after copying -# os.environ(), but before setting any session-specific variables. -PTVSD_ENV = { -} - -# Code that is injected into the debuggee process when it does `import debug_me`, -# and start_method is attach_socket_* -PTVSD_DEBUG_ME = """ -import ptvsd -ptvsd.enable_attach(("localhost", {ptvsd_port})) -ptvsd.wait_for_attach() -""" - - -StopInfo = collections.namedtuple('StopInfo', [ - 'body', - 'frames', - 'thread_id', - 'frame_id', -]) - - -class Session(object): - WAIT_FOR_EXIT_TIMEOUT = 10 - """Timeout used by wait_for_exit() before it kills the ptvsd process tree. - """ - - START_METHODS = { - 'launch', # ptvsd --client ... foo.py - 'attach_socket_cmdline', # ptvsd ... foo.py - 'attach_socket_import', # python foo.py (foo.py must import debug_me) - 'attach_pid', # python foo.py && ptvsd ... --pid - 'custom_client', # python foo.py (foo.py has to manually call ptvsd.attach) - 'custom_server', # python foo.py (foo.py has to manually call ptvsd.enable_attach) - } - - DEBUG_ME_START_METHODS = {"attach_socket_import"} - """Start methods that require import debug_me.""" - - _counter = itertools.count(1) - - def __init__(self, start_method=None, pid=None, ptvsd_port=None): - self._created = False - if pid is None: - assert start_method in self.START_METHODS - assert ptvsd_port is None or start_method.startswith('attach_socket_') - else: - assert start_method is None - assert ptvsd_port is not None - start_method = "custom_server" - - watchdog.start() - self.id = next(self._counter) - - format_string = 'New debug session {session}' - if pid is None: - format_string += '; debugged process will be started via {start_method!r}.' - else: - format_string += " for existing process with pid={pid}." - log.info(format_string, session=self, start_method=start_method, pid=pid) - - self.lock = threading.RLock() - self.target = None - self.start_method = start_method - self.start_method_args = {} - self.no_debug = False - self.ptvsd_port = ptvsd_port or PTVSD_PORT - self.debug_options = {'RedirectOutput'} - self.path_mappings = [] - self.success_exitcodes = None - self.rules = [] - self.cwd = None - self.expected_returncode = 0 - self.program_args = [] - self.log_dir = None - self._before_connect = lambda: None - - self.env = os.environ.copy() - self.env.update(PTVSD_ENV) - self.env['PYTHONPATH'] = (tests.root / "DEBUGGEE_PYTHONPATH").strpath - self.env['PTVSD_SESSION_ID'] = str(self.id) - - self.process = None - self.process_exited = False - self.pid = pid - self.is_running = pid is not None - self.psutil_process = psutil.Process(self.pid) if self.is_running else None - self.kill_ptvsd_on_close = True - self.socket = None - self.server_socket = None - self.connected = threading.Event() - self.backchannel = None - self.scratchpad = ScratchPad(self) - - self.capture_output = True - self.captured_output = CapturedOutput(self) - - self.timeline = Timeline(ignore_unobserved=[ - Event('output'), - Event('thread', some.dict.containing({'reason': 'exited'})) - ]) - self.timeline.freeze() - self.perform_handshake = True - - # Expose some common members of timeline directly - these should be the ones - # that are the most straightforward to use, and are difficult to use incorrectly. - # Conversely, most tests should restrict themselves to this subset of the API, - # and avoid calling members of timeline directly unless there is a good reason. 
- self.new = self.timeline.new - self.observe = self.timeline.observe - self.wait_for_next = self.timeline.wait_for_next - self.proceed = self.timeline.proceed - self.expect_new = self.timeline.expect_new - self.expect_realized = self.timeline.expect_realized - self.all_occurrences_of = self.timeline.all_occurrences_of - self.observe_all = self.timeline.observe_all - - # adapter process - self._adapter_process = None - self._psutil_adapter = None - - # This must always be the last attribute set, to avoid issues with __del__ - # trying to cleanup a partially constructed Session. - self._created = True - - def __str__(self): - return fmt("ptvsd-{0}", self.id) - - def __del__(self): - if not self._created: - return - # Always kill the process tree, even if kill_ptvsd_on_close is False. Any - # test that wants to keep it alive should do so explicitly by keeping the - # Session object alive for the requisite amount of time after it is closed. - # But there is no valid scenario in which any ptvsd process should outlive - # the test in which it was spawned. - self.kill_process_tree() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None: - # Log the error, in case another one happens during shutdown. - log.exception(exc_info=(exc_type, exc_val, exc_tb)) - - # If we're exiting a failed test, make sure that all output from the debuggee - # process has been received and logged, before we close the sockets and kill - # the process tree. In success case, wait_for_exit() takes care of that. - # If it failed in the middle of the test, the debuggee process might still - # be alive, and waiting for the test to tell it to continue. In this case, - # it will never close its stdout/stderr, so use a reasonable timeout here. - self.captured_output.wait(timeout=1) - - was_final = self.timeline.is_final - self.close() - assert exc_type is not None or was_final, ( - 'Session timeline must be finalized before session goes out of scope at the end of the ' - 'with-statement. Use wait_for_exit(), wait_for_termination(), or wait_for_disconnect() ' - 'as appropriate.' 
- ) - - def __contains__(self, expectation): - return expectation in self.timeline - - @property - def ignore_unobserved(self): - return self.timeline.ignore_unobserved - - @ignore_unobserved.setter - def ignore_unobserved(self, value): - self.timeline.ignore_unobserved = value - - def close(self): - with self.lock: - if self.socket: - log.debug('Closing {0} socket...', self) - try: - self.socket.shutdown(socket.SHUT_RDWR) - except Exception: - pass - try: - self.socket.close() - except Exception: - pass - self.socket = None - - if self.server_socket: - log.debug('Closing {0} server socket...', self) - try: - self.server_socket.shutdown(socket.SHUT_RDWR) - except Exception: - pass - try: - self.server_socket.close() - except Exception: - pass - self.server_socket = None - - if self.backchannel: - self.backchannel.close() - self.backchannel = None - - if self.kill_ptvsd_on_close: - self.kill_process_tree() - - log.info('{0} closed', self) - - def _get_adapter_args(self): - argv = [sys.executable] - argv += [PTVSD_ADAPTER_DIR.strpath] - return argv - - def _get_argv_for_attach_using_import(self): - argv = [sys.executable] - return argv - - def _get_argv_for_launch(self): - argv = [sys.executable] - argv += [PTVSD_DIR.strpath] - argv += ['--client'] - argv += ['--host', 'localhost', '--port', str(self.ptvsd_port)] - return argv - - def _get_argv_for_attach_using_cmdline(self): - argv = [sys.executable] - argv += [PTVSD_DIR.strpath] - argv += ['--wait'] - argv += ['--host', 'localhost', '--port', str(self.ptvsd_port)] - return argv - - def _get_argv_for_attach_using_pid(self): - argv = [sys.executable] - argv += [PTVSD_DIR.strpath] - argv += ['--client', '--host', 'localhost', '--port', str(self.ptvsd_port)] - # argv += ['--pid', ''] # pid value to be appended later - return argv - - def _get_argv_for_custom_server(self): - return [sys.executable] - - def _get_argv_for_custom_client(self): - return [sys.executable] - - def _validate_pyfile(self, filename): - assert os.path.isfile(filename) - with open(filename, "rb") as f: - code = f.read() - if self.start_method in self.DEBUG_ME_START_METHODS: - assert b"debug_me" in code, fmt( - "{0} is started via {1}, but it doesn't import debug_me.", - filename, - self.start_method, - ) - - return code - - def _get_target(self): - argv = [] - run_as, path_or_code = self.target - if isinstance(path_or_code, py.path.local): - path_or_code = path_or_code.strpath - if run_as == 'file': - self._validate_pyfile(path_or_code) - argv += [path_or_code] - if self.cwd is None: - self.cwd = os.path.dirname(path_or_code) - elif run_as == 'module': - if os.path.isfile(path_or_code): - self._validate_pyfile(path_or_code) - if os.path.isfile(path_or_code) or os.path.isdir(path_or_code): - self.env['PYTHONPATH'] += os.pathsep + os.path.dirname(path_or_code) - try: - module = path_or_code[(len(os.path.dirname(path_or_code)) + 1): -3] - except Exception: - module = 'code_to_debug' - argv += ['-m', module] - else: - argv += ['-m', path_or_code] - elif run_as == 'code': - if os.path.isfile(path_or_code): - path_or_code = self._validate_pyfile(path_or_code) - argv += ['-c', path_or_code] - else: - pytest.fail() - return argv - - def _setup_session(self, **kwargs): - self.ignore_unobserved += [ - Event('thread', some.dict.containing({'reason': 'started'})), - Event('module') - ] + kwargs.pop('ignore_unobserved', []) - - self.env.update(kwargs.pop('env', {})) - self.start_method_args.update(kwargs.pop('args', {})) - - self.debug_options |= set(kwargs.pop('debug_options', [])) - 
self.path_mappings += kwargs.pop('path_mappings', []) - self.program_args += kwargs.pop('program_args', []) - self.rules += kwargs.pop('rules', []) - - for k, v in kwargs.items(): - setattr(self, k, v) - - assert self.start_method in self.START_METHODS - assert len(self.target) == 2 - assert self.target[0] in ('file', 'module', 'code') - - def setup_backchannel(self): - """Creates a BackChannel object associated with this Session, and returns it. - - The debuggee must import backchannel to establish the connection. - """ - assert self.process is None, ( - "setup_backchannel() must be called before initialize()" - ) - self.backchannel = BackChannel(self) - return self.backchannel - - def before_connect(self, func): - """Registers a function to be invoked by initialize() before connecting to - the debuggee, or before waiting for an incoming connection, but after all - the session parameters (port number etc) are determined.""" - self._before_connect = func - - def initialize(self, **kwargs): - """Spawns ptvsd using the configured method, telling it to execute the - provided Python file, module, or code, and establishes a message channel - to it. - - If perform_handshake is True, calls self.handshake() before returning. - """ - - self._setup_session(**kwargs) - start_method = self.start_method - - log.debug('Initializing debug session for {0}', self) - if self.ignore_unobserved: - log.info( - "Will not complain about unobserved:\n\n{0}", - "\n\n".join(repr(exp) for exp in self.ignore_unobserved) - ) - - dbg_argv = [] - usr_argv = [] - - if start_method == 'launch': - dbg_argv += self._get_argv_for_launch() - elif start_method == 'attach_socket_cmdline': - dbg_argv += self._get_argv_for_attach_using_cmdline() - elif start_method == 'attach_socket_import': - dbg_argv += self._get_argv_for_attach_using_import() - # TODO: Remove adding to python path after enabling Tox - self.env['PYTHONPATH'] = (PTVSD_DIR / "..").strpath + os.pathsep + self.env['PYTHONPATH'] - self.env['PTVSD_DEBUG_ME'] = fmt(PTVSD_DEBUG_ME, ptvsd_port=self.ptvsd_port) - elif start_method == 'attach_pid': - self._listen() - dbg_argv += self._get_argv_for_attach_using_pid() - elif start_method == 'custom_client': - self._listen() - dbg_argv += self._get_argv_for_custom_client() - elif start_method == 'custom_server': - dbg_argv += self._get_argv_for_custom_server() - else: - pytest.fail() - - adapter_args = self._get_adapter_args() - - if self.log_dir: - dbg_argv += ['--log-dir', self.log_dir] - adapter_args += ['--log-dir', self.log_dir] - - if self.no_debug: - dbg_argv += ['--nodebug'] - - if start_method == 'attach_pid': - usr_argv += [sys.executable] - usr_argv += self._get_target() - else: - dbg_argv += self._get_target() - - if self.program_args: - if start_method == 'attach_pid': - usr_argv += list(self.program_args) - else: - dbg_argv += list(self.program_args) - - if self.backchannel: - self.backchannel.listen() - self.env['PTVSD_BACKCHANNEL_PORT'] = str(self.backchannel.port) - - # Normalize args to either bytes or unicode, depending on Python version. - # Assume that values are filenames - it's usually either that, or numbers. 
- make_filename = compat.filename_bytes if sys.version_info < (3,) else compat.filename - env = { - compat.force_str(k): make_filename(v) - for k, v in self.env.items() - } - - env_str = "\n".join(( - fmt("{0}={1}", env_name, env[env_name]) - for env_name in sorted(env.keys()) - )) - - cwd = self.cwd - if isinstance(cwd, py.path.local): - cwd = cwd.strpath - - log.info( - '{0} will have:\n\n' - 'ptvsd: {1}\n' - 'port: {2}\n' - 'start method: {3}\n' - 'target: ({4}) {5}\n' - 'current directory: {6}\n' - 'environment variables:\n\n{7}', - self, - py.path.local(ptvsd.__file__).dirpath(), - self.ptvsd_port, - start_method, - self.target[0], - self.target[1], - self.cwd, - env_str, - ) - - adapter_args = [make_filename(s) for s in adapter_args] - - log.info('Spawning adapter {0}:\n\n{1}', self, "\n".join((repr(s) for s in adapter_args))) - stdio = {} - stdio["stdin"] = stdio["stdout"] = subprocess.PIPE - self._adapter_process = subprocess.Popen( - adapter_args, - env=env, - cwd=cwd, - bufsize=0, - **stdio - ) - self._psutil_adapter = psutil.Process(self._adapter_process.pid) - log.info('Spawned adapter {0} with pid={1}', self, self._adapter_process.pid) - self._setup_adapter_messaging() - # watchdog.register_spawn(self._adapter_process.pid, str(self)) - - # spawn_args = usr_argv if start_method == 'attach_pid' else dbg_argv - - # # Normalize args to either bytes or unicode, depending on Python version. - # spawn_args = [make_filename(s) for s in spawn_args] - - # log.info('Spawning {0}:\n\n{1}', self, "\n".join((repr(s) for s in spawn_args))) - # stdio = {} - # if self.capture_output: - # stdio["stdin"] = stdio["stdout"] = stdio["stderr"] = subprocess.PIPE - # self.process = subprocess.Popen( - # spawn_args, - # env=env, - # cwd=cwd, - # bufsize=0, - # **stdio - # ) - # self.pid = self.process.pid - # self.psutil_process = psutil.Process(self.pid) - self.is_running = True - # log.info('Spawned {0} with pid={1}', self, self.pid) - # watchdog.register_spawn(self.pid, str(self)) - - # if self.capture_output: - # self.captured_output.capture(self.process) - - if start_method == 'attach_pid': - # This is a temp process spawned to inject debugger into the debuggee. - dbg_argv += ['--pid', str(self.pid)] - attach_helper_name = fmt("attach_helper-{0}", self.id) - log.info( - "Spawning {0} for {1}:\n\n{2}", - attach_helper_name, - self, - "\n".join((repr(s) for s in dbg_argv)) - ) - attach_helper = psutil.Popen(dbg_argv) - log.info('Spawned {0} with pid={1}', attach_helper_name, attach_helper.pid) - watchdog.register_spawn(attach_helper.pid, attach_helper_name) - try: - attach_helper.wait() - finally: - watchdog.unregister_spawn(attach_helper.pid, attach_helper_name) - - self._before_connect() - - if start_method.startswith("attach_socket_") or start_method == "custom_server": - self.connect() - self.connected.wait() - - assert self.ptvsd_port - # assert self.socket - - if self.perform_handshake: - return self.handshake() - - def wait_for_disconnect(self, close=True): - """Waits for the connected ptvsd process to disconnect. - """ - - log.info('Waiting for {0} to disconnect', self) - - self.captured_output.wait() - self.channel.close() - self.timeline.finalize() - if close: - self.timeline.close() - - def wait_for_termination(self, close=False): - # BUG: ptvsd sometimes exits without sending 'terminate' or 'exited', likely due to - # https://github.com/Microsoft/ptvsd/issues/530. 
So rather than wait for them, wait until - # we disconnect, then check those events for proper body only if they're actually present. - - self.wait_for_disconnect(close=False) - - if Event('exited') in self: - expected_returncode = self.expected_returncode - - # Due to https://github.com/Microsoft/ptvsd/issues/1278, exit code is not recorded - # in the "exited" event correctly in attach scenarios on Windows. - if self.start_method == 'attach_socket_import' and platform.system() == 'Windows': - expected_returncode = some.int - - self.expect_realized( - Event( - 'exited', - some.dict.containing({'exitCode': expected_returncode}) - ) - ) - - if Event('terminated') in self: - self.expect_realized(Event('exited') >> Event('terminated')) - - if close: - self.timeline.close() - - def wait_for_exit(self): - """Waits for the spawned ptvsd process to exit. If it doesn't exit within - WAIT_FOR_EXIT_TIMEOUT seconds, forcibly kills the process tree. After the - process exits, validates its return code to match expected_returncode. - """ - - if not self.is_running: - return - - self.send_request('disconnect') - assert self._adapter_process is not None - - timed_out = [] - def kill_after_timeout(): - time.sleep(self.WAIT_FOR_EXIT_TIMEOUT) - if self.is_running: - log.warning( - 'wait_for_exit() timed out while waiting for {0} (pid={1})', - 'ptvsd.adapter', - self._adapter_process.pid, - ) - timed_out[:] = [True] - self.kill_process_tree() - - kill_thread = threading.Thread( - target=kill_after_timeout, - name=fmt( - 'wait_for_exit({0!r}, pid={1!r})', - 'ptvsd.adapter', - self._adapter_process.pid, - ), - ) - kill_thread.daemon = True - kill_thread.start() - - self.is_running = False - self.wait_for_termination(close=True) - - log.info('Waiting for {0} (pid={1}) to terminate...', self, self.pid) - returncode = self._psutil_adapter.wait() - watchdog.unregister_spawn(self.pid, str(self)) - self.process_exited = True - - assert not timed_out, "wait_for_exit() timed out" - assert returncode == self.expected_returncode - - def kill_process_tree(self): - if self.psutil_process is None or self.process_exited: - return - - log.info('Killing {0} process tree...', self) - - procs = [self.psutil_process] - try: - procs += self.psutil_process.children(recursive=True) - except Exception: - pass - - for p in procs: - log.warning( - "Killing {0} {1}process (pid={2})", - self, - "" if p.pid == self.pid else "child ", - p.pid, - ) - try: - p.kill() - except psutil.NoSuchProcess: - pass - except Exception: - log.exception() - - self.process_exited = True - log.info('Killed {0} process tree', self) - - self.captured_output.wait() - self.close_stdio() - - def close_stdio(self): - if self.process is None: - return - - log.debug('Closing stdio pipes of {0}...', self) - try: - self.process.stdin.close() - except Exception: - pass - try: - self.process.stdout.close() - except Exception: - pass - try: - self.process.stderr.close() - except Exception: - pass - - def _listen(self): - self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.server_socket.bind(('localhost', 0)) - _, self.ptvsd_port = self.server_socket.getsockname() - self.server_socket.listen(0) - - def accept_worker(): - with self.lock: - server_socket = self.server_socket - if server_socket is None: - return - - log.info('Listening for incoming connection from {0} on port {1}...', self, self.ptvsd_port) - try: - sock, _ = server_socket.accept() - except Exception: - log.exception() - return - log.info('Incoming connection from {0} 
accepted.', self) - - try: - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - with self.lock: - if self.server_socket is not None: - self.socket = sock - sock = None - self._setup_channel() - else: - # self.close() has been called concurrently. - pass - finally: - if sock is not None: - try: - sock.close() - except Exception: - pass - - accept_thread = threading.Thread(target=accept_worker, name=fmt('{0} listener', self)) - accept_thread.daemon = True - accept_thread.start() - - def connect(self): - # ptvsd will take some time to spawn and start listening on the port, - # so just hammer at it until it responds (or we time out). - while not self.socket: - try: - self._try_connect() - except Exception: - log.exception('Error connecting to {0}; retrying ...', self, category="warning") - time.sleep(0.1) - self._setup_channel() - - def _try_connect(self): - log.info('Trying to connect to {0} on port {1}...', self, self.ptvsd_port) - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect(('localhost', self.ptvsd_port)) - log.info('Connected to {0}.', self) - self.socket = sock - - def _setup_channel(self): - self.stream = messaging.JsonIOStream.from_socket(self.socket, name=str(self)) - handlers = messaging.MessageHandlers(request=self._process_request, event=self._process_event) - self.channel = messaging.JsonMessageChannel(self.stream, handlers) - self.channel.start() - self.connected.set() - - def _setup_adapter_messaging(self): - self.stream = messaging.JsonIOStream.from_process(self._adapter_process, name=str(self)) - handlers = messaging.MessageHandlers(request=self._process_request, event=self._process_event) - self.channel = messaging.JsonMessageChannel(self.stream, handlers) - self.channel.start() - self.connected.set() - - def send_request(self, command, arguments=None, proceed=True): - if self.timeline.is_frozen and proceed: - self.proceed() - - message = self.channel.send_request(command, arguments) - request = self.timeline.record_request(message) - - # Register callback after recording the request, so that there's no race - # between it being recorded, and the response to it being received. - message.on_response( - lambda response: self._process_response(request, response) - ) - - return request - - def request(self, *args, **kwargs): - freeze = kwargs.pop("freeze", True) - raise_if_failed = kwargs.pop("raise_if_failed", True) - return self.send_request(*args, **kwargs).wait_for_response( - freeze=freeze, - raise_if_failed=raise_if_failed, - ).body - - def handshake(self): - """Performs the handshake that establishes the debug session ('initialized' - and 'launch' or 'attach'). - - After this method returns, ptvsd is not running any code yet, but it is - ready to accept any configuration requests (e.g. for initial breakpoints). - Once initial configuration is complete, start_debugging() should be called - to finalize the configuration stage, and start running code. 
- """ - - telemetry = self.wait_for_next_event('output') - assert telemetry == { - 'category': 'telemetry', - 'output': 'ptvsd.adapter', - 'data': {'version': some.str}, - #'data': {'version': ptvsd.__version__}, - } - - self.request('initialize', { - 'adapterID': 'test', - 'pathFormat': 'path' - }) - - request = 'launch' if self.start_method == 'launch' else 'attach' - self.start_method_args.update({ - 'debugOptions': list(self.debug_options), - 'pathMappings': self.path_mappings, - 'rules': self.rules, - 'python': sys.executable, - 'cwd': self.cwd, - }) - - if self.start_method == 'launch': - t = self._get_target() - try: - _, _t = self._get_target() - except: - _t = t - - run_as, _ = self.target - if run_as == 'file': - self.start_method_args.update({'program': _t}) - elif run_as == 'module': - self.start_method_args.update({'module': _t}) - else: - self.start_method_args.update({'code': _t}) - - if self.success_exitcodes is not None: - self.start_method_args['successExitCodes'] = self.success_exitcodes - self._launch_or_attach_request = self.send_request(request, self.start_method_args) - self.wait_for_next(Event('initialized')) - - - def start_debugging(self, freeze=True): - """Finalizes the configuration stage, and issues a 'configurationDone' request - to start running code under debugger. - - After this method returns, ptvsd is running the code in the script file or module - that was specified via self.target. - """ - - configurationDone_request = self.send_request('configurationDone') - start = self.wait_for_next(Response(configurationDone_request)) - - if self.no_debug: - self._launch_or_attach_request.wait_for_response() - else: - self.wait_for_next(Event('process') & Response(self._launch_or_attach_request)) - - # 'process' is expected right after 'launch' or 'attach'. - self.expect_new(Event('process', { - 'name': some.str, - 'isLocalProcess': True, - 'startMethod': 'launch' if self.start_method == 'launch' else 'attach', - 'systemProcessId': self.pid if self.pid is not None else some.int, - })) - - # Issue 'threads' so that we get the 'thread' event for the main thread now, - # rather than at some random time later during the test. - # Note: it's actually possible that the 'thread' event was sent before the 'threads' - # request (although the 'threads' will force 'thread' to be sent if it still wasn't). - self.send_request('threads').wait_for_response() - self.expect_realized(Event('thread')) - - if not freeze: - self.proceed() - - return start - - def _process_event(self, event): - if event.event == "ptvsd_subprocess": - pid = event.body["processId"] - watchdog.register_spawn(pid, fmt("{0}-subprocess-{1}", self, pid)) - self.timeline.record_event(event, block=False) - - - def _process_request(self, request): - self.timeline.record_request(request, block=False) - - def _process_response(self, request_occ, response): - self.timeline.record_response(request_occ, response, block=False) - if request_occ.command == "disconnect": - # Stop the message loop, since the ptvsd is going to close the connection - # from its end shortly after sending this event, and no further messages - # are expected. 
- log.info( - 'Received "disconnect" response from {0}; stopping message processing.', - 'ptvsd.adapter', - ) - raise EOFError(fmt("{0} disconnect", self)) - - def wait_for_next_event(self, event, body=some.object): - return self.timeline.wait_for_next(Event(event, body)).body - - def output(self, category): - """Returns all output of a given category as a single string, assembled from - all the "output" events received for that category so far. - """ - events = self.all_occurrences_of( - Event("output", some.dict.containing({"category": category})) - ) - return "".join(event.body["output"] for event in events) - - def captured_stdout(self, encoding=None): - return self.captured_output.stdout(encoding) - - def captured_stderr(self, encoding=None): - return self.captured_output.stderr(encoding) - - # Helpers for specific DAP patterns. - - def wait_for_stop(self, reason=some.str, expected_frames=None, expected_text=None, expected_description=None): - stopped_event = self.wait_for_next(Event('stopped', some.dict.containing({'reason': reason}))) - stopped = stopped_event.body - - if expected_text is not None: - assert expected_text == stopped['text'] - - if expected_description is not None: - assert expected_description == stopped['description'] - - tid = stopped['threadId'] - assert tid == some.int - - assert stopped['allThreadsStopped'] - if stopped['reason'] not in ['step', 'exception', 'breakpoint', 'entry']: - assert stopped['preserveFocusHint'] - - stack_trace = self.request('stackTrace', arguments={'threadId': tid}) - frames = stack_trace['stackFrames'] or [] - assert len(frames) == stack_trace['totalFrames'] - - if expected_frames: - assert len(expected_frames) <= len(frames) - assert expected_frames == frames[0:len(expected_frames)] - - fid = frames[0]['id'] - assert fid == some.int - - return StopInfo(stopped, frames, tid, fid) - - def request_continue(self): - self.request('continue', freeze=False) - - def request_disconnect(self): - self.request('disconnect', freeze=False) - - def set_breakpoints(self, path, lines): - """Sets breakpoints in the specified file, and returns the list of all the - corresponding DAP Breakpoint objects in the same order. - - If lines are specified, it should be an iterable in which every element is - either a line number or a string. If it is a string, then it is translated - to the corresponding line number via get_marked_line_numbers(path). - - If lines=all, breakpoints will be set on all the marked lines in the file. - """ - - # Don't fetch line markers unless needed - in some cases, the breakpoints - # might be set in a file that does not exist on disk (e.g. remote attach). 
- def get_marked_line_numbers(): - try: - return get_marked_line_numbers.cached - except AttributeError: - get_marked_line_numbers.cached = code.get_marked_line_numbers(path) - return get_marked_line_numbers() - - if lines is all: - lines = get_marked_line_numbers().keys() - - def make_breakpoint(line): - if isinstance(line, int): - descr = str(line) - else: - marker = line - line = get_marked_line_numbers()[marker] - descr = fmt("{0} (@{1})", line, marker) - bp_log.append((line, descr)) - return {'line': line} - - bp_log = [] - breakpoints = self.request( - 'setBreakpoints', - { - 'source': {'path': path}, - 'breakpoints': [make_breakpoint(line) for line in lines], - }, - ).get('breakpoints', []) - - bp_log = sorted(bp_log, key=lambda pair: pair[0]) - bp_log = ", ".join((descr for _, descr in bp_log)) - log.info("Breakpoints set in {0}: {1}", path, bp_log) - - return breakpoints - - def get_variables(self, *varnames, **kwargs): - """Fetches the specified variables from the frame specified by frame_id, or - from the topmost frame in the last "stackTrace" response if frame_id is not - specified. - - If varnames is empty, then all variables in the frame are returned. The result - is an OrderedDict, in which every entry has variable name as the key, and a - DAP Variable object as the value. The original order of variables as reported - by the debugger is preserved. - - If varnames is not empty, then only the specified variables are returned. - The result is a tuple, in which every entry is a DAP Variable object; those - entries are in the same order as varnames. - """ - - assert self.timeline.is_frozen - - frame_id = kwargs.pop("frame_id", None) - if frame_id is None: - stackTrace_responses = self.all_occurrences_of( - Response(Request("stackTrace")) - ) - assert stackTrace_responses, ( - 'get_variables() without frame_id requires at least one response ' - 'to a "stackTrace" request in the timeline.' - ) - stack_trace = stackTrace_responses[-1].body - frame_id = stack_trace["stackFrames"][0]["id"] - - scopes = self.request("scopes", {"frameId": frame_id})["scopes"] - assert len(scopes) > 0 - - variables = self.request( - "variables", {"variablesReference": scopes[0]["variablesReference"]} - )["variables"] - - variables = collections.OrderedDict(((v["name"], v) for v in variables)) - if varnames: - assert set(varnames) <= set(variables.keys()) - return tuple((variables[name] for name in varnames)) - else: - return variables - - def get_variable(self, varname, frame_id=None): - """Same as get_variables(...)[0]. 
- """ - return self.get_variables(varname, frame_id=frame_id)[0] - - def attach_to_subprocess(self, ptvsd_subprocess): - assert ptvsd_subprocess == Event("ptvsd_subprocess") - - pid = ptvsd_subprocess.body['processId'] - child_port = ptvsd_subprocess.body['port'] - assert (pid, child_port) == (some.int, some.int) - - log.info( - 'Attaching to subprocess of {0} with pid={1} at port {2}', - self, - pid, - child_port, - ) - - child_session = Session(pid=pid, ptvsd_port=child_port) - try: - child_session.ignore_unobserved = self.ignore_unobserved - child_session.debug_options = self.debug_options - child_session.rules = self.rules - - child_session.connect() - child_session.connected.wait() - child_session.handshake() - except Exception: - child_session.close() - raise - else: - return child_session - - def attach_to_next_subprocess(self): - ptvsd_subprocess = self.wait_for_next(Event('ptvsd_subprocess')) - return self.attach_to_subprocess(ptvsd_subprocess) - - def reattach(self, **kwargs): - """Creates and initializes a new Session that tries to attach to the same - process. - - Upon return, handshake() has been performed, but the caller is responsible - for invoking start_debugging(). - """ - - assert self.start_method.startswith("attach_socket_") - - ns = Session(pid=self.pid, ptvsd_port=self.ptvsd_port) - try: - ns._setup_session(**kwargs) - ns.ignore_unobserved = list(self.ignore_unobserved) - ns.debug_options = set(self.debug_options) - ns.rules = list(self.rules) - ns.process = self.process - - ns.connect() - ns.connected.wait() - ns.handshake() - except Exception: - ns.close() - raise - else: - return ns - - -class CapturedOutput(object): - """Captured stdout and stderr of the debugged process. - """ - - def __init__(self, session): - self.session = session - self._lock = threading.Lock() - self._lines = {} - self._worker_threads = [] - - def __str__(self): - return fmt("CapturedOutput({0!r})", self.session) - - def _worker(self, pipe, name): - lines = self._lines[name] - while True: - try: - line = pipe.readline() - except Exception: - line = None - - if line: - log.info("{0} {1}> {2!r}", self.session, name, line) - with self._lock: - lines.append(line) - else: - break - - def _capture(self, pipe, name): - assert name not in self._lines - self._lines[name] = [] - - thread = threading.Thread( - target=lambda: self._worker(pipe, name), - name=fmt("{0} {1}", self, name) - ) - thread.daemon = True - thread.start() - self._worker_threads.append(thread) - - def capture(self, process): - """Start capturing stdout and stderr of the process. - """ - assert not self._worker_threads - log.info('Capturing {0} stdout and stderr', self.session) - self._capture(process.stdout, "stdout") - self._capture(process.stderr, "stderr") - - def wait(self, timeout=None): - """Wait for all remaining output to be captured. - """ - if not self._worker_threads: - return - log.debug('Waiting for remaining {0} stdout and stderr...', self.session) - for t in self._worker_threads: - t.join(timeout) - self._worker_threads[:] = [] - - def _output(self, which, encoding, lines): - assert self.session.timeline.is_frozen - - try: - result = self._lines[which] - except KeyError: - raise AssertionError(fmt("{0} was not captured for {1}", which, self.session)) - - # The list might still be appended to concurrently, so take a snapshot of it. 
- with self._lock: - result = list(result) - - if encoding is not None: - result = [s.decode(encoding) for s in result] - - if not lines: - sep = b'' if encoding is None else u'' - result = sep.join(result) - - return result - - def stdout(self, encoding=None): - """Returns stdout captured from the debugged process, as a single string. - - If encoding is None, returns bytes. Otherwise, returns unicode. - """ - return self._output("stdout", encoding, lines=False) - - def stderr(self, encoding=None): - """Returns stderr captured from the debugged process, as a single string. - - If encoding is None, returns bytes. Otherwise, returns unicode. - """ - return self._output("stderr", encoding, lines=False) - - def stdout_lines(self, encoding=None): - """Returns stdout captured from the debugged process, as a list of lines. - - If encoding is None, each line is bytes. Otherwise, each line is unicode. - """ - return self._output("stdout", encoding, lines=True) - - def stderr_lines(self, encoding=None): - """Returns stderr captured from the debugged process, as a list of lines. - - If encoding is None, each line is bytes. Otherwise, each line is unicode. - """ - return self._output("stderr", encoding, lines=True) - - -class BackChannel(object): - TIMEOUT = 20 - - def __init__(self, session): - self.session = session - self.port = None - self._established = threading.Event() - self._socket = None - self._server_socket = None - - def __str__(self): - return fmt("backchannel-{0}", self.session.id) - - def listen(self): - self._server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self._server_socket.settimeout(self.TIMEOUT) - self._server_socket.bind(('localhost', 0)) - _, self.port = self._server_socket.getsockname() - self._server_socket.listen(0) - - def accept_worker(): - log.info('Listening for incoming connection from {0} on port {1}...', self, self.port) - - try: - self._socket, _ = self._server_socket.accept() - except socket.timeout: - raise log.exception("Timed out waiting for {0} to connect", self) - - self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - log.info('Incoming connection from {0} accepted.', self) - self._setup_stream() - - accept_thread = threading.Thread( - target=accept_worker, - name=fmt('{0} listener', self) - ) - accept_thread.daemon = True - accept_thread.start() - - def _setup_stream(self): - self._stream = messaging.JsonIOStream.from_socket(self._socket, name=str(self)) - self._established.set() - - def receive(self): - self._established.wait() - return self._stream.read_json() - - def send(self, value): - self._established.wait() - self.session.timeline.unfreeze() - t = self.session.timeline.mark(('sending', value)) - self._stream.write_json(value) - return t - - def expect(self, expected): - actual = self.receive() - assert expected == actual, fmt( - "Test expected {0!r} on backchannel, but got {1!r} from the debuggee", - expected, - actual, - ) - - def close(self): - if self._socket: - log.debug('Closing {0} socket of {1}...', self, self.session) - try: - self._socket.shutdown(socket.SHUT_RDWR) - except Exception: - pass - self._socket = None - - if self._server_socket: - log.debug('Closing {0} server socket of {1}...', self, self.session) - try: - self._server_socket.shutdown(socket.SHUT_RDWR) - except Exception: - pass - self._server_socket = None - - -class ScratchPad(object): - def __init__(self, session): - self.session = session - - def __getitem__(self, key): - raise NotImplementedError - - def __setitem__(self, key, value): - 
"""Sets debug_me.scratchpad[key] = value inside the debugged process. - """ - - stackTrace_responses = self.session.all_occurrences_of( - Response(Request("stackTrace")) - ) - assert stackTrace_responses, ( - 'scratchpad requires at least one "stackTrace" request in the timeline.' - ) - stack_trace = stackTrace_responses[-1].body - frame_id = stack_trace["stackFrames"][0]["id"] - - log.info("{0} debug_me.scratchpad[{1!r}] = {2!r}", self.session, key, value) - expr = fmt( - "__import__('debug_me').scratchpad[{0!r}] = {1!r}", - key, - value, - ) - self.session.request( - "evaluate", - { - "frameId": frame_id, - "context": "repl", - "expression": expr, - }, - ) diff --git a/tests/ptvsd/server/test_args.py b/tests/ptvsd/server/test_args.py index b0fc5b5fb..46ab4c561 100644 --- a/tests/ptvsd/server/test_args.py +++ b/tests/ptvsd/server/test_args.py @@ -7,24 +7,24 @@ import pytest from tests import debug +from tests.debug import targets from tests.patterns import some -@pytest.mark.parametrize("run_as", ["program", "module", "code"]) -def test_args(pyfile, start_method, run_as): +@pytest.mark.parametrize("target", targets.all) +def test_args(pyfile, target, run): @pyfile def code_to_debug(): from debug_me import backchannel import sys + backchannel.send(sys.argv) - with debug.Session(start_method, backchannel=True) as session: - expected = ["--arg1", "arg2", "-arg3", "--", "arg4", "-a"] - session.configure( - run_as, code_to_debug, - args=expected - ) - session.start_debugging() + args = ["--arg1", "arg2", "-arg3", "--", "arg4", "-a"] - argv = session.backchannel.receive() - assert argv == [some.str] + expected + with debug.Session() as session: + backchannel = session.open_backchannel() + with run(session, target(code_to_debug, args=args)): + pass + argv = backchannel.receive() + assert argv == [some.str] + args diff --git a/tests/ptvsd/server/test_attach.py b/tests/ptvsd/server/test_attach.py index 2bab2de50..41b37ff0a 100644 --- a/tests/ptvsd/server/test_attach.py +++ b/tests/ptvsd/server/test_attach.py @@ -6,31 +6,63 @@ import pytest -from tests import debug, test_data -from tests.debug import start_methods +from tests import debug +from tests.debug import runners, targets from tests.patterns import some from tests.timeline import Event -@pytest.mark.parametrize("wait_for_attach", ["wait_for_attach", ""]) -@pytest.mark.parametrize("is_attached", ["is_attached", ""]) @pytest.mark.parametrize("stop_method", ["break_into_debugger", "pause"]) -def test_attach(run_as, wait_for_attach, is_attached, stop_method): - attach1_py = test_data / "attach" / "attach1.py" - - with debug.Session("custom_server", backchannel=True) as session: - session.env.update({ - "ATTACH1_TEST_PORT": str(session.ptvsd_port), - "ATTACH1_WAIT_FOR_ATTACH": "1" if wait_for_attach else "0", - "ATTACH1_IS_ATTACHED": "1" if is_attached else "0", - "ATTACH1_BREAK_INTO_DEBUGGER": ( - "1" if stop_method == "break_into_debugger" else "0" - ), - }) - - backchannel = session.backchannel - session.configure(run_as, attach1_py) - session.start_debugging() +@pytest.mark.parametrize("is_attached", ["is_attached", ""]) +@pytest.mark.parametrize("wait_for_attach", ["wait_for_attach", ""]) +@pytest.mark.parametrize("target", targets.all) +def test_attach_api(pyfile, target, wait_for_attach, is_attached, stop_method): + @pyfile + def code_to_debug(): + from debug_me import backchannel, ptvsd, scratchpad + import sys + import time + + _, host, port, wait_for_attach, is_attached, stop_method = sys.argv + port = int(port) + 
ptvsd.enable_attach((host, port)) + + if wait_for_attach: + backchannel.send("wait_for_attach") + ptvsd.wait_for_attach() + + if is_attached: + backchannel.send("is_attached") + while not ptvsd.is_attached(): + print("looping until is_attached") + time.sleep(0.1) + + if stop_method == "break_into_debugger": + backchannel.send("break_into_debugger?") + assert backchannel.receive() == "proceed" + ptvsd.break_into_debugger() + print("break") # @break_into_debugger + else: + scratchpad["paused"] = False + backchannel.send("loop?") + assert backchannel.receive() == "proceed" + while not scratchpad["paused"]: + print("looping until paused") + time.sleep(0.1) + + with debug.Session() as session: + host, port = runners.attach_by_socket.host, runners.attach_by_socket.port + session.config.update({"host": host, "port": port}) + + backchannel = session.open_backchannel() + session.spawn_debuggee( + [code_to_debug, host, port, wait_for_attach, is_attached, stop_method] + ) + session.wait_for_enable_attach() + + session.connect_to_adapter((host, port)) + with session.request_attach(): + pass if wait_for_attach: assert backchannel.receive() == "wait_for_attach" @@ -41,9 +73,9 @@ def test_attach(run_as, wait_for_attach, is_attached, stop_method): if stop_method == "break_into_debugger": assert backchannel.receive() == "break_into_debugger?" backchannel.send("proceed") - session.wait_for_stop(expected_frames=[ - some.dap.frame(attach1_py, "break_into_debugger") - ]) + session.wait_for_stop( + expected_frames=[some.dap.frame(code_to_debug, "break_into_debugger")] + ) elif stop_method == "pause": assert backchannel.receive() == "loop?" backchannel.send("proceed") @@ -56,11 +88,9 @@ def test_attach(run_as, wait_for_attach, is_attached, stop_method): session.request_continue() -@pytest.mark.parametrize( - "start_method", ["attach_socket_cmdline", "attach_socket_import"] -) -@pytest.mark.skip(reason="https://github.com/microsoft/ptvsd/issues/1594") -def test_reattach(pyfile, start_method, run_as): +@pytest.mark.parametrize("run", runners.all_attach) +@pytest.mark.skip(reason="https://github.com/microsoft/ptvsd/issues/1802") +def test_reattach(pyfile, target, run): @pyfile def code_to_debug(): from debug_me import ptvsd, scratchpad @@ -75,34 +105,31 @@ def code_to_debug(): ptvsd.break_into_debugger() object() # @second - with debug.Session(start_method) as session: - session.configure( - run_as, code_to_debug, - kill_ptvsd=False, - capture_output=False, + with debug.Session() as session1: + session1.captured_output = None + session1.expected_exit_code = None # not expected to exit on disconnect + + with run(session1, target(code_to_debug)): + host, port = session1.config["host"], session1.config["port"] + + session1.wait_for_stop(expected_frames=[some.dap.frame(code_to_debug, "first")]) + session1.disconnect() + + with debug.Session() as session2: + session2.config.update(session1.config) + with session2.connect_to_adapter((host, port)): + pass + + session2.wait_for_stop( + expected_frames=[some.dap.frame(code_to_debug, "second")] ) - session.start_debugging() - session.wait_for_stop(expected_frames=[ - some.dap.frame(code_to_debug, "first"), - ]) - session.request("disconnect") - session.wait_for_disconnect() - - with session.reattach(target=(run_as, code_to_debug)) as session2: - session2.start_debugging() - session2.wait_for_stop(expected_frames=[ - some.dap.frame(code_to_debug, "second"), - ]) - session.scratchpad["exit"] = True - session.request("disconnect") - session.wait_for_disconnect() - 
- -@pytest.mark.parametrize("start_method", [start_methods.AttachProcessId]) -@pytest.mark.parametrize("run_as", ["program", "module", "code"]) -def test_attaching_by_pid(pyfile, run_as, start_method): + session2.scratchpad["exit"] = True + + +def test_attach_by_pid(pyfile, target): @pyfile def code_to_debug(): + import debug_me # noqa import time def do_something(i): @@ -112,14 +139,11 @@ def do_something(i): for i in range(100): do_something(i) - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.set_breakpoints(code_to_debug, all) - session.start_debugging() + with debug.Session() as session: + with session.attach_by_pid(target(code_to_debug), wait=False): + session.set_breakpoints(code_to_debug, all) - session.wait_for_stop(expected_frames=[ - some.dap.frame(code_to_debug, "bp"), - ]) + session.wait_for_stop(expected_frames=[some.dap.frame(code_to_debug, "bp")]) # Remove breakpoint and continue. session.set_breakpoints(code_to_debug, []) diff --git a/tests/ptvsd/server/test_break_into_dbg.py b/tests/ptvsd/server/test_break_into_dbg.py deleted file mode 100644 index f953e1bd7..000000000 --- a/tests/ptvsd/server/test_break_into_dbg.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See LICENSE in the project root -# for license information. - -from __future__ import absolute_import, print_function, unicode_literals - -import pytest - -from tests import debug - - -@pytest.mark.parametrize("run_as", ["program", "module", "code"]) -def test_with_wait_for_attach(pyfile, start_method, run_as): - @pyfile - def code_to_debug(): - # NOTE: These tests verify break_into_debugger for launch - # and attach cases. For attach this is always after wait_for_attach - from debug_me import ptvsd - - ptvsd.break_into_debugger() - print("break here") # @break - - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.start_debugging() - hit = session.wait_for_stop() - assert hit.frames[0]["line"] == code_to_debug.lines["break"] - - session.request_continue() - - -@pytest.mark.parametrize("run_as", ["program", "module", "code"]) -@pytest.mark.skip(reason="https://github.com/microsoft/ptvsd/issues/1505") -def test_breakpoint_function(pyfile, start_method, run_as): - @pyfile - def code_to_debug(): - # NOTE: These tests verify break_into_debugger for launch - # and attach cases. 
For attach this is always after wait_for_attach - import debug_me # noqa - - # TODO: use ptvsd.break_into_debugger() on <3.7 - breakpoint() # noqa - print("break here") # @break - - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.start_debugging() - hit = session.wait_for_stop() - path = hit.frames[0]["source"]["path"] - assert path.endswith("code_to_debug.py") or path.endswith("") - assert hit.frames[0]["line"] == code_to_debug.lines["break"] - - session.request_continue() diff --git a/tests/ptvsd/server/test_breakpoints.py b/tests/ptvsd/server/test_breakpoints.py index ff1a6d999..29f4a09d5 100644 --- a/tests/ptvsd/server/test_breakpoints.py +++ b/tests/ptvsd/server/test_breakpoints.py @@ -12,25 +12,23 @@ from ptvsd.common import fmt from tests import debug, test_data -from tests.debug import start_methods +from tests.debug import runners, targets from tests.patterns import some bp_root = test_data / "bp" -def test_path_with_ampersand(start_method, run_as): +def test_path_with_ampersand(target, run): test_py = bp_root / "a&b" / "test.py" - with debug.Session(start_method) as session: - session.configure(run_as, test_py) - session.set_breakpoints(test_py, ["two"]) - session.start_debugging() + with debug.Session() as session: + with run(session, target(test_py)): + session.set_breakpoints(test_py, ["two"]) session.wait_for_stop( "breakpoint", expected_frames=[some.dap.frame(test_py, line="two")] ) - session.request_continue() @@ -41,86 +39,69 @@ def test_path_with_ampersand(start_method, run_as): platform.system() == "Windows" and sys.version_info < (3, 6), reason="https://github.com/Microsoft/ptvsd/issues/1124#issuecomment-459506802", ) -def test_path_with_unicode(start_method, run_as): +def test_path_with_unicode(target, run): test_py = bp_root / "ನನ್ನ_ಸ್ಕ್ರಿಪ್ಟ್.py" - with debug.Session(start_method) as session: - session.configure(run_as, test_py) - session.set_breakpoints(test_py, ["bp"]) - session.start_debugging() + with debug.Session() as session: + with run(session, target(test_py)): + session.set_breakpoints(test_py, ["bp"]) session.wait_for_stop( "breakpoint", expected_frames=[some.dap.frame(test_py, name="ಏನಾದರೂ_ಮಾಡು", line="bp")], ) - session.request_continue() -@pytest.mark.parametrize( - "condition_kind", - [ - "condition", - "hitCondition", - "hitCondition-eq", - "hitCondition-gt", - "hitCondition-ge", - "hitCondition-lt", - "hitCondition-le", - "hitCondition-mod", - ], -) -def test_conditional_breakpoint(pyfile, start_method, run_as, condition_kind): +conditions = { + ("condition", "i==5"): lambda i: i == 5, + ("hitCondition", "5"): lambda i: i == 5, + ("hitCondition", "==5"): lambda i: i == 5, + ("hitCondition", ">5"): lambda i: i > 5, + ("hitCondition", ">=5"): lambda i: i >= 5, + ("hitCondition", "<5"): lambda i: i < 5, + ("hitCondition", "<=5"): lambda i: i <= 5, + ("hitCondition", "%3"): lambda i: i % 3 == 0, +} + + +@pytest.mark.parametrize("condition_kind, condition", list(conditions.keys())) +def test_conditional_breakpoint(pyfile, target, run, condition_kind, condition): + hit = conditions[condition_kind, condition] + @pyfile def code_to_debug(): import debug_me # noqa - for i in range(0, 10): + for i in range(1, 10): print(i) # @bp - condition_property = condition_kind.partition("-")[0] - condition, value, hits = { - "condition": ("i==5", "5", 1), - "hitCondition": ("5", "4", 1), - "hitCondition-eq": ("==5", "4", 1), - "hitCondition-gt": (">5", "5", 5), - "hitCondition-ge": (">=5", "4", 6), - 
"hitCondition-lt": ("<5", "0", 4), - "hitCondition-le": ("<=5", "0", 5), - "hitCondition-mod": ("%3", "2", 3), - }[condition_kind] - - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.request( - "setBreakpoints", - { - "source": {"path": code_to_debug}, - "breakpoints": [ - {"line": code_to_debug.lines["bp"], condition_property: condition} - ], - }, - ) - session.start_debugging() - - session.wait_for_stop( - expected_frames=[some.dap.frame(code_to_debug, line="bp")] - ) - - session.get_variables() - - var_i = session.get_variable("i") - assert var_i == some.dict.containing( - {"name": "i", "type": "int", "value": value, "evaluateName": "i"} - ) + with debug.Session() as session: + with run(session, target(code_to_debug)): + session.request( + "setBreakpoints", + { + "source": {"path": code_to_debug}, + "breakpoints": [ + {"line": code_to_debug.lines["bp"], condition_kind: condition} + ], + }, + ) - session.request_continue() - for i in range(1, hits): - session.wait_for_stop() + for i in range(1, 10): + if not hit(i): + continue + session.wait_for_stop( + expected_frames=[some.dap.frame(code_to_debug, line="bp")] + ) + var_i = session.get_variable("i") + assert var_i == some.dict.containing( + {"name": "i", "evaluateName": "i", "type": "int", "value": str(i)} + ) session.request_continue() -def test_crossfile_breakpoint(pyfile, start_method, run_as): +def test_crossfile_breakpoint(pyfile, target, run): @pyfile def script1(): import debug_me # noqa @@ -136,52 +117,48 @@ def script2(): script1.do_something() # @bp print("Done") - with debug.Session(start_method) as session: - session.configure(run_as, script2) - session.set_breakpoints(script1, all) - session.set_breakpoints(script2, all) - session.start_debugging() + with debug.Session() as session: + with run(session, target(script2)): + session.set_breakpoints(script1, all) + session.set_breakpoints(script2, all) session.wait_for_stop(expected_frames=[some.dap.frame(script2, line="bp")]) - session.request_continue() session.wait_for_stop(expected_frames=[some.dap.frame(script1, line="bp")]) - session.request_continue() +# NameError in condition is a special case: pydevd is configured to skip traceback for +# name errors. See https://github.com/microsoft/ptvsd/issues/853 for more details. For +# all other errors, we should be printing traceback. @pytest.mark.parametrize("error_name", ["NameError", ""]) -def test_error_in_condition(pyfile, start_method, run_as, error_name): - @pyfile - def code_to_debug(): - import debug_me # noqa - - for i in range(1, 10): # @bp - pass - +def test_error_in_condition(pyfile, target, run, error_name): error_name = error_name or "ZeroDivisionError" - # NOTE: NameError in condition, is a special case. Pydevd is configured to skip - # traceback for name errors. See https://github.com/Microsoft/ptvsd/issues/853 - # for more details. For all other errors we should be printing traceback. 
condition, expect_traceback = { "NameError": ("no_such_name", False), "ZeroDivisionError": ("1 / 0", True), }[error_name] - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.request( - "setBreakpoints", - { - "source": {"path": code_to_debug}, - "breakpoints": [ - {"line": code_to_debug.lines["bp"], "condition": condition} - ], - }, - ) - session.start_debugging() + @pyfile + def code_to_debug(): + import debug_me # noqa + + for i in range(1, 10): # @bp + pass + + with debug.Session() as session: + with run(session, target(code_to_debug)): + session.request( + "setBreakpoints", + { + "source": {"path": code_to_debug}, + "breakpoints": [ + {"line": code_to_debug.lines["bp"], "condition": condition} + ], + }, + ) assert not session.captured_stdout() @@ -193,7 +170,7 @@ def code_to_debug(): @pytest.mark.parametrize("condition", ["condition", ""]) -def test_log_point(pyfile, start_method, run_as, condition): +def test_log_point(pyfile, target, run, condition): @pyfile def code_to_debug(): import debug_me # noqa @@ -205,21 +182,18 @@ def code_to_debug(): () # @wait_for_output lines = code_to_debug.lines - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - - bp = {"line": lines["bp"], "logMessage": "{i}"} - if condition: - bp["condition"] = "i == 5" - - session.request( - "setBreakpoints", - { - "source": {"path": code_to_debug}, - "breakpoints": [bp, {"line": lines["wait_for_output"]}], - }, - ) - session.start_debugging() + with debug.Session() as session: + with run(session, target(code_to_debug)): + bp = {"line": lines["bp"], "logMessage": "{i}"} + if condition: + bp["condition"] = "i == 5" + session.request( + "setBreakpoints", + { + "source": {"path": code_to_debug}, + "breakpoints": [bp, {"line": lines["wait_for_output"]}], + }, + ) if condition: session.wait_for_stop( @@ -253,24 +227,25 @@ def code_to_debug(): assert session.output("stderr") == some.str.matching(expected_stderr) -def test_package_launch(): - cwd = test_data / "testpkgs" - test_py = cwd / "pkg1" / "__main__.py" +@pytest.mark.parametrize("run", runners.all_launch) +def test_package_launch(run): + testpkgs = test_data / "testpkgs" + main_py = testpkgs / "pkg1" / "__main__.py" - with debug.Session(start_methods.Launch) as session: + with debug.Session() as session: session.expected_exit_code = 42 - session.configure("module", "pkg1", cwd=cwd) - session.set_breakpoints(test_py, ["two"]) - session.start_debugging() + session.config["cwd"] = testpkgs.strpath + + with run(session, targets.Module(name="pkg1")): + session.set_breakpoints(main_py, ["two"]) session.wait_for_stop( - "breakpoint", expected_frames=[some.dap.frame(test_py, line="two")] + "breakpoint", expected_frames=[some.dap.frame(main_py, line="two")] ) - session.request_continue() -def test_add_and_remove_breakpoint(pyfile, start_method, run_as): +def test_add_and_remove_breakpoint(pyfile, target, run): @pyfile def code_to_debug(): import debug_me # noqa @@ -279,10 +254,9 @@ def code_to_debug(): print(i) # @bp () # @wait_for_output - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.set_breakpoints(code_to_debug, all) - session.start_debugging() + with debug.Session() as session: + with run(session, target(code_to_debug)): + session.set_breakpoints(code_to_debug, all) session.wait_for_stop( "breakpoint", expected_frames=[some.dap.frame(code_to_debug, line="bp")] @@ -302,27 +276,27 @@ def code_to_debug(): assert 
session.output("stdout") == expected_stdout -def test_breakpoints_in_filtered_files(pyfile, run_as, start_method): +def test_breakpoint_in_nonexistent_file(pyfile, target, run): @pyfile def code_to_debug(): import debug_me # noqa - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - - breakpoints = session.set_breakpoints("invalid_file.py", [1]) - assert breakpoints == [ - { - "verified": False, - "message": "Breakpoint in file that does not exist.", - "source": some.dict.containing({"path": some.path("invalid_file.py")}), - "line": 1, - } - ] - session.start_debugging() - - -def test_invalid_breakpoints(pyfile, start_method, run_as): + with debug.Session() as session: + with run(session, target(code_to_debug)): + breakpoints = session.set_breakpoints("nonexistent_file.py", [1]) + assert breakpoints == [ + { + "verified": False, + "message": "Breakpoint in file that does not exist.", + "source": some.dict.containing( + {"path": some.path("nonexistent_file.py")} + ), + "line": 1, + } + ] + + +def test_invalid_breakpoints(pyfile, target, run): @pyfile def code_to_debug(): import debug_me # noqa @@ -345,26 +319,26 @@ def code_to_debug(): 4, 5, 6) # fmt: on - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) + with debug.Session() as session: + with run(session, target(code_to_debug)): + bp_markers = ["bp1-requested", "bp2-requested", "bp3-requested"] + if sys.version_info < (3,): + bp_markers += ["bp4-requested-1", "bp4-requested-2"] - bp_markers = ["bp1-requested", "bp2-requested", "bp3-requested"] - if sys.version_info < (3,): - bp_markers += ["bp4-requested-1", "bp4-requested-2"] + bps = session.set_breakpoints(code_to_debug, bp_markers) + actual_lines = [bp["line"] for bp in bps] - bps = session.set_breakpoints(code_to_debug, bp_markers) - actual_lines = [bp["line"] for bp in bps] + expected_markers = ["bp1-expected", "bp2-expected", "bp3-expected"] + if sys.version_info < (3,): + expected_markers += ["bp4-expected", "bp4-expected"] + expected_lines = [ + code_to_debug.lines[marker] for marker in expected_markers + ] - expected_markers = ["bp1-expected", "bp2-expected", "bp3-expected"] - if sys.version_info < (3,): - expected_markers += ["bp4-expected", "bp4-expected"] - expected_lines = [code_to_debug.lines[marker] for marker in expected_markers] - - assert actual_lines == expected_lines + assert actual_lines == expected_lines # Now let's make sure that we hit all of the expected breakpoints, # and stop where we expect them to be. - session.start_debugging() # If there's multiple breakpoints on the same line, we only stop once, # so remove duplicates first. 
@@ -379,7 +353,7 @@ def code_to_debug(): session.request_continue() -def test_deep_stacks(pyfile, start_method, run_as): +def test_deep_stacks(pyfile, target, run): @pyfile def code_to_debug(): import debug_me # noqa @@ -392,10 +366,9 @@ def deep_stack(level): deep_stack(100) - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.set_breakpoints(code_to_debug, all) - session.start_debugging() + with debug.Session() as session: + with run(session, target(code_to_debug)): + session.set_breakpoints(code_to_debug, all) stop = session.wait_for_stop() assert len(stop.frames) > 100 @@ -405,16 +378,37 @@ def deep_stack(level): for _ in range(5): stack_trace = session.request( "stackTrace", - arguments={ - "threadId": stop.thread_id, - "startFrame": len(frames), - "levels": 25, - }, + {"threadId": stop.thread_id, "startFrame": len(frames), "levels": 25}, ) assert stack_trace["totalFrames"] > 0 frames += stack_trace["stackFrames"] assert stop.frames == frames + session.request_continue() + + +@pytest.mark.parametrize("target", targets.all) +@pytest.mark.parametrize("func", ["breakpoint", "ptvsd.break_into_debugger"]) +def test_break_api(pyfile, target, run, func): + if func == "breakpoint" and sys.version_info < (3, 7): + pytest.skip("breakpoint() was introduced in Python 3.7") + + @pyfile + def code_to_debug(): + from debug_me import ptvsd # noqa + import sys + + func = eval(sys.argv[1]) + func() + print("break here") # @break + with debug.Session() as session: + target = target(code_to_debug, args=[func]) + with run(session, target): + pass + + session.wait_for_stop( + expected_frames=[some.dap.frame(target.source, target.lines["break"])] + ) session.request_continue() diff --git a/tests/ptvsd/server/test_completions.py b/tests/ptvsd/server/test_completions.py index 5bb7730e4..aa990e9cb 100644 --- a/tests/ptvsd/server/test_completions.py +++ b/tests/ptvsd/server/test_completions.py @@ -29,8 +29,8 @@ } -@pytest.mark.parametrize("bp_label", sorted(expected_at_line.keys())) -def test_completions_scope(pyfile, bp_label, start_method, run_as): +@pytest.mark.parametrize("line", sorted(expected_at_line.keys())) +def test_completions_scope(pyfile, line, target, run): @pyfile def code_to_debug(): import debug_me # noqa @@ -50,28 +50,23 @@ def someFunction(someVar): someFunction("value") print("done") # @done - expected = expected_at_line[bp_label] + expected = sorted(expected_at_line[line], key=lambda t: t["label"]) - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) + with debug.Session() as session: + with run(session, target(code_to_debug)): + session.set_breakpoints(code_to_debug, [code_to_debug.lines[line]]) - session.set_breakpoints(code_to_debug, [code_to_debug.lines[bp_label]]) - session.start_debugging() - - hit = session.wait_for_stop(reason="breakpoint") - resp_completions = session.send_request( - "completions", arguments={"text": "some", "frameId": hit.frame_id, "column": 5} - ).wait_for_response() - targets = resp_completions.body["targets"] + stop = session.wait_for_stop("breakpoint") + completions = session.request( + "completions", {"text": "some", "frameId": stop.frame_id, "column": 5} + ) + targets = sorted(completions["targets"], key=lambda t: t["label"]) + assert targets == expected session.request_continue() - targets.sort(key=lambda t: t["label"]) - expected.sort(key=lambda t: t["label"]) - assert targets == expected - -def test_completions_cases(pyfile, start_method, run_as): +def 
test_completions_cases(pyfile, target, run): @pyfile def code_to_debug(): import debug_me # noqa @@ -81,52 +76,39 @@ def code_to_debug(): c = 3 print([a, b, c]) # @break - with debug.Session(start_method) as session: - session.configure(run_as, code_to_debug) - session.set_breakpoints(code_to_debug, [code_to_debug.lines["break"]]) - session.start_debugging() - hit = session.wait_for_stop() + with debug.Session() as session: + with run(session, target(code_to_debug)): + session.set_breakpoints(code_to_debug, [code_to_debug.lines["break"]]) - response = session.send_request( - "completions", - arguments={"frameId": hit.frame_id, "text": "b.", "column": 3}, - ).wait_for_response() + stop = session.wait_for_stop() - labels = set(target["label"] for target in response.body["targets"]) - assert labels.issuperset( - ["get", "items", "keys", "setdefault", "update", "values"] + completions = session.request( + "completions", {"frameId": stop.frame_id, "text": "b.", "column": 3} ) + labels = set(target["label"] for target in completions["targets"]) + assert labels >= {"get", "items", "keys", "setdefault", "update", "values"} - response = session.send_request( + completions = session.request( "completions", - arguments={ - "frameId": hit.frame_id, - "text": "x = b.setdefault", - "column": 13, - }, - ).wait_for_response() - - assert response.body["targets"] == [ + {"frameId": stop.frame_id, "text": "x = b.setdefault", "column": 13}, + ) + assert completions["targets"] == [ {"label": "setdefault", "length": 6, "start": 6, "type": "function"} ] - response = session.send_request( - "completions", - arguments={"frameId": hit.frame_id, "text": "not_there", "column": 10}, - ).wait_for_response() - - assert not response.body["targets"] + completions = session.request( + "completions", {"frameId": stop.frame_id, "text": "not_there", "column": 10} + ) + assert not completions["targets"] - # Check errors - with pytest.raises(messaging.MessageHandlingError) as error: - response = session.send_request( + with pytest.raises(messaging.MessageHandlingError): + completions = session.request( "completions", - arguments={ - "frameId": 9999999, # frameId not available. 
+ { + "frameId": 9999999, # nonexistent frameId "text": "not_there", "column": 10, }, - ).wait_for_response() - assert "Wrong ID sent from the client:" in str(error) + ) session.request_continue() diff --git a/tests/ptvsd/server/test_disconnect.py b/tests/ptvsd/server/test_disconnect.py index 2bd49e606..674b01ef7 100644 --- a/tests/ptvsd/server/test_disconnect.py +++ b/tests/ptvsd/server/test_disconnect.py @@ -8,38 +8,34 @@ import pytest from tests import debug -from tests.debug import start_methods +from tests.debug import runners from tests.patterns import some @pytest.mark.parametrize( - "start_method", [start_methods.AttachSocketCmdLine, start_methods.AttachSocketImport] + "run", [runners.attach_by_socket["api"], runners.attach_by_socket["cli"]] ) -def test_continue_on_disconnect_for_attach(pyfile, start_method, run_as): +def test_continue_on_disconnect_for_attach(pyfile, target, run): @pyfile def code_to_debug(): from debug_me import backchannel backchannel.send("continued") # @bp - with debug.Session(start_method, backchannel=True) as session: - backchannel = session.backchannel - session.configure(run_as, code_to_debug) - session.set_breakpoints(code_to_debug, all) - session.start_debugging() + with debug.Session() as session: + backchannel = session.open_backchannel() + with run(session, target(code_to_debug)): + session.set_breakpoints(code_to_debug, all) session.wait_for_stop( - "breakpoint", - expected_frames=[ - some.dap.frame(code_to_debug, line="bp"), - ], + "breakpoint", expected_frames=[some.dap.frame(code_to_debug, line="bp")] ) - session.request("disconnect") + session.disconnect() assert "continued" == backchannel.receive() -@pytest.mark.parametrize("start_method", [start_methods.Launch]) -def test_exit_on_disconnect_for_launch(pyfile, start_method, run_as): +@pytest.mark.parametrize("run", runners.all_launch) +def test_exit_on_disconnect_for_launch(pyfile, target, run): @pyfile def code_to_debug(): import debug_me # noqa @@ -50,19 +46,15 @@ def code_to_debug(): with open(fp, "w") as f: print("Should not continue after disconnect on launch", file=f) - with debug.Session(start_method) as session: + with debug.Session() as session: session.expected_exit_code = some.int - session.configure(run_as, code_to_debug) - session.set_breakpoints(code_to_debug, all) - session.start_debugging() + with run(session, target(code_to_debug)): + session.set_breakpoints(code_to_debug, all) session.wait_for_stop( - "breakpoint", - expected_frames=[ - some.dap.frame(code_to_debug, line="bp"), - ], + "breakpoint", expected_frames=[some.dap.frame(code_to_debug, line="bp")] ) - session.request("disconnect") + session.disconnect() fp = os.path.join(os.path.dirname(os.path.abspath(code_to_debug)), "here.txt") assert not os.path.exists(fp) diff --git a/tests/ptvsd/server/test_django.py b/tests/ptvsd/server/test_django.py index 485b7f74b..988800e7e 100644 --- a/tests/ptvsd/server/test_django.py +++ b/tests/ptvsd/server/test_django.py @@ -7,12 +7,12 @@ import pytest from ptvsd.common import compat from tests import code, debug, log, net, test_data -from tests.debug import start_methods +from tests.debug import runners, targets from tests.patterns import some pytestmark = pytest.mark.timeout(60) -django = net.WebServer(net.get_test_server_port(8000, 8100)) +django_server = net.WebServer(net.get_test_server_port(8000, 8100)) class paths: @@ -26,44 +26,40 @@ class lines: app_py = code.get_marked_line_numbers(paths.app_py) -def _initialize_session(session, multiprocess=None, exit_code=0): - if 
multiprocess: - pytest.skip("https://github.com/microsoft/ptvsd/issues/1706") +@pytest.fixture +@pytest.mark.parametrize("run", [runners.launch, runners.attach_by_socket["cli"]]) +def start_django(run): + def start(session, multiprocess=False): + if multiprocess: + pytest.skip("https://github.com/microsoft/ptvsd/issues/1706") - args = ["runserver"] - if not multiprocess: - args += ["--noreload"] - args += ["--", str(django.port)] + session.expected_exit_code = some.int + session.config.update({"django": True, "subProcess": bool(multiprocess)}) - session.expected_exit_code = exit_code - session.configure( - "program", - paths.app_py, - cwd=paths.django1, - subProcess=multiprocess, - args=args, - django=True, - ) + args = ["runserver"] + if not multiprocess: + args += ["--noreload"] + args += ["--", str(django_server.port)] + + return run(session, targets.Program(paths.app_py, args), cwd=paths.django1) + + return start -@pytest.mark.parametrize( - "start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine] -) @pytest.mark.parametrize("bp_target", ["code", "template"]) -def test_django_breakpoint_no_multiproc(start_method, bp_target): +def test_django_breakpoint_no_multiproc(start_django, bp_target): bp_file, bp_line, bp_name = { "code": (paths.app_py, lines.app_py["bphome"], "home"), "template": (paths.hello_html, 8, "Django Template"), }[bp_target] bp_var_content = compat.force_str("Django-Django-Test") - with debug.Session(start_method) as session: - _initialize_session(session, exit_code=some.int) - session.set_breakpoints(bp_file, [bp_line]) - session.start_debugging() + with debug.Session() as session: + with start_django(session): + session.set_breakpoints(bp_file, [bp_line]) - with django: - home_request = django.get("/home") + with django_server: + home_request = django_server.get("/home") session.wait_for_stop( "breakpoint", expected_frames=[ @@ -87,17 +83,13 @@ def test_django_breakpoint_no_multiproc(start_method, bp_target): assert bp_var_content in home_request.response_text() -@pytest.mark.parametrize( - "start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine] -) -def test_django_template_exception_no_multiproc(start_method): - with debug.Session(start_method) as session: - _initialize_session(session, exit_code=some.int) - session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}) - session.start_debugging() +def test_django_template_exception_no_multiproc(start_django): + with debug.Session() as session: + with start_django(session): + session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}) - with django: - django.get("/badtemplate", log_errors=False) + with django_server: + django_server.get("/badtemplate", log_errors=False) stop = session.wait_for_stop( "exception", expected_frames=[ @@ -134,20 +126,16 @@ def test_django_template_exception_no_multiproc(start_method): session.request_continue() -@pytest.mark.parametrize( - "start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine] -) @pytest.mark.parametrize("exc_type", ["handled", "unhandled"]) -def test_django_exception_no_multiproc(start_method, exc_type): +def test_django_exception_no_multiproc(start_django, exc_type): exc_line = lines.app_py["exc_" + exc_type] - with debug.Session(start_method) as session: - _initialize_session(session, exit_code=some.int) - session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}) - session.start_debugging() + with debug.Session() as session: + with 
start_django(session): + session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}) - with django: - django.get("/" + exc_type) + with django_server: + django_server.get("/" + exc_type) stopped = session.wait_for_stop( "exception", expected_frames=[ @@ -186,22 +174,21 @@ def test_django_exception_no_multiproc(start_method, exc_type): session.request_continue() -@pytest.mark.parametrize("start_method", [start_methods.Launch]) -def test_django_breakpoint_multiproc(start_method): +def test_django_breakpoint_multiproc(start_django): bp_line = lines.app_py["bphome"] bp_var_content = compat.force_str("Django-Django-Test") - with debug.Session(start_method) as parent_session: - _initialize_session(parent_session, multiprocess=True, exit_code=some.int) - parent_session.set_breakpoints(paths.app_py, [bp_line]) - parent_session.start_debugging() + with debug.Session() as parent_session: + with start_django(parent_session, multiprocess=True): + parent_session.set_breakpoints(paths.app_py, [bp_line]) - with parent_session.attach_to_next_subprocess() as child_session: - child_session.set_breakpoints(paths.app_py, [bp_line]) - child_session.start_debugging() + child_pid = parent_session.wait_for_next_subprocess() + with debug.Session() as child_session: + with child_session.attach_by_pid(child_pid): + child_session.set_breakpoints(paths.app_py, [bp_line]) - with django: - home_request = django.get("/home") + with django_server: + home_request = django_server.get("/home") child_session.wait_for_stop( "breakpoint", expected_frames=[ diff --git a/tests/ptvsd/server/test_flask.py b/tests/ptvsd/server/test_flask.py index f80fceafc..13dd298c1 100644 --- a/tests/ptvsd/server/test_flask.py +++ b/tests/ptvsd/server/test_flask.py @@ -10,7 +10,7 @@ from ptvsd.common import compat from tests import code, debug, log, net, test_data -from tests.debug import start_methods +from tests.debug import runners from tests.patterns import some pytestmark = pytest.mark.timeout(60) @@ -60,7 +60,7 @@ def _initialize_session(session, multiprocess=None, exit_code=0): @pytest.mark.parametrize( - "start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine] + "start_method", [runners.launch, runners.attach_by_socket["cli"]] ) @pytest.mark.parametrize("bp_target", ["code", "template"]) def test_flask_breakpoint_no_multiproc(start_method, bp_target): @@ -103,7 +103,7 @@ def test_flask_breakpoint_no_multiproc(start_method, bp_target): @pytest.mark.parametrize( - "start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine] + "start_method", [runners.launch, runners.attach_by_socket["cli"]] ) def test_flask_template_exception_no_multiproc(start_method): with debug.Session(start_method) as session: @@ -160,7 +160,7 @@ def test_flask_template_exception_no_multiproc(start_method): @pytest.mark.parametrize( - "start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine] + "start_method", [runners.launch, runners.attach_by_socket["cli"]] ) @pytest.mark.parametrize("exc_type", ["handled", "unhandled"]) def test_flask_exception_no_multiproc(start_method, exc_type): @@ -213,12 +213,11 @@ def test_flask_exception_no_multiproc(start_method, exc_type): session.request_continue() -@pytest.mark.parametrize("start_method", [start_methods.Launch]) -def test_flask_breakpoint_multiproc(start_method): +def test_flask_breakpoint_multiproc(): bp_line = lines.app_py["bphome"] bp_var_content = compat.force_str("Flask-Jinja-Test") - with debug.Session(start_method) as parent_session: + with 
debug.Session(runners.launch) as parent_session: # No clean way to kill Flask server _initialize_session(parent_session, multiprocess=True, exit_code=some.int) parent_session.set_breakpoints(paths.app_py, [bp_line]) diff --git a/tests/ptvsd/server/test_log.py b/tests/ptvsd/server/test_log.py index 868cde92c..c8a5863c9 100644 --- a/tests/ptvsd/server/test_log.py +++ b/tests/ptvsd/server/test_log.py @@ -7,57 +7,64 @@ import contextlib import pytest -from ptvsd.common import compat from tests import debug -from tests.debug import start_methods +from tests.debug import runners, targets @contextlib.contextmanager -def check_logs(tmpdir, session): - assert not tmpdir.listdir("ptvsd-*.log") - yield - assert len(tmpdir.listdir("ptvsd-*.log")) == 1 - log_name = "ptvsd-{}.log".format(session.pid) - assert tmpdir.join(log_name).size() > 0 +def check_logs(tmpdir, run): + expected_logs = { + "ptvsd.adapter-*.log": 1, + "ptvsd.launcher-*.log": 1 if run.request == "launch" else 0, + # For attach_by_pid, there's ptvsd.server process that performs the injection, + # and then there's the debug server that is injected into the debuggee. + "ptvsd.server-*.log": 2 if type(run).__name__ == "attach_by_pid" else 1, + } + + actual_logs = lambda: { + filename: len(tmpdir.listdir(filename)) for filename in expected_logs + } + assert actual_logs() == {filename: 0 for filename in expected_logs} + yield + assert actual_logs() == expected_logs -@pytest.mark.parametrize("cli", ["arg", "env"]) -def test_log_cli(pyfile, tmpdir, start_method, run_as, cli): - if cli == "arg" and start_method == "attach_socket_import": - pytest.skip() +@pytest.mark.parametrize("target", targets.all) +@pytest.mark.parametrize("method", ["api", "cli"]) +def test_log_dir(pyfile, tmpdir, target, method): @pyfile def code_to_debug(): import debug_me # noqa - with debug.Session(start_method) as session: - with check_logs(tmpdir, session): - env = {} - if cli == "arg": - session.log_dir = str(tmpdir) - else: - env["PTVSD_LOG_DIR"] = str(tmpdir) - session.configure(run_as, code_to_debug, env=env) - session.start_debugging() + # Depending on the method, attach_by_socket will use either `ptvsd --log-dir ...` + # or `enable_attach(log_dir=) ...`. 
+ run = runners.attach_by_socket[method].with_options(log_dir=tmpdir.strpath) + with check_logs(tmpdir, run): + with debug.Session() as session: + session.log_dir = None + with run(session, target(code_to_debug)): + pass -@pytest.mark.parametrize("start_method", [start_methods.CustomServer]) -def test_log_api(pyfile, tmpdir, start_method, run_as): +@pytest.mark.parametrize("run", runners.all) +@pytest.mark.parametrize("target", targets.all) +def test_log_dir_env(pyfile, tmpdir, run, target): @pyfile def code_to_debug(): - from debug_me import backchannel, ptvsd - port, log_dir = backchannel.receive() - ptvsd.enable_attach(("localhost", port), log_dir=log_dir) - ptvsd.wait_for_attach() - - log_dir = compat.filename(tmpdir) - with debug.Session(start_method, backchannel=True) as session: - backchannel = session.backchannel - - @session.before_connect - def before_connect(): - backchannel.send([session.ptvsd_port, log_dir]) - - with check_logs(tmpdir, session): - session.configure(run_as, code_to_debug) - session.start_debugging() + from debug_me import backchannel # noqa + + assert backchannel.receive() == "proceed" + + with check_logs(tmpdir, run): + with debug.Session() as session: + session.log_dir = None + session.spawn_adapter.env["PTVSD_LOG_DIR"] = tmpdir + if run.request != "launch": + session.spawn_debuggee.env["PTVSD_LOG_DIR"] = tmpdir + + backchannel = session.open_backchannel() + with run(session, target(code_to_debug)): + pass + + backchannel.send("proceed") diff --git a/tests/ptvsd/server/test_multiproc.py b/tests/ptvsd/server/test_multiproc.py index 8955db373..6bd3579c4 100644 --- a/tests/ptvsd/server/test_multiproc.py +++ b/tests/ptvsd/server/test_multiproc.py @@ -10,7 +10,7 @@ from ptvsd.common import messaging from tests import debug -from tests.debug import start_methods +from tests.debug import runners from tests.patterns import some from tests.timeline import Event, Request @@ -23,7 +23,7 @@ platform.system() != "Windows", reason="Debugging multiprocessing module only works on Windows", ) -@pytest.mark.parametrize("start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine]) +@pytest.mark.parametrize("start_method", [runners.launch, runners.attach_by_socket["cli"]]) def test_multiprocessing(pyfile, start_method, run_as): @pyfile def code_to_debug(): @@ -143,7 +143,7 @@ def child(q): @pytest.mark.skipif( sys.version_info < (3, 0) and (platform.system() != "Windows"), reason="Bug #935" ) -@pytest.mark.parametrize("start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine]) +@pytest.mark.parametrize("start_method", [runners.launch, runners.attach_by_socket["cli"]]) def test_subprocess(pyfile, start_method, run_as): @pyfile def child(): @@ -210,7 +210,7 @@ def parent(): @pytest.mark.skipif( sys.version_info < (3, 0) and (platform.system() != "Windows"), reason="Bug #935" ) -@pytest.mark.parametrize("start_method", [start_methods.Launch, start_methods.AttachSocketCmdLine]) +@pytest.mark.parametrize("start_method", [runners.launch, runners.attach_by_socket["cli"]]) def test_autokill(pyfile, start_method, run_as): @pyfile def child(): diff --git a/tests/ptvsd/server/test_run.py b/tests/ptvsd/server/test_run.py index e984d489f..0631d8c73 100644 --- a/tests/ptvsd/server/test_run.py +++ b/tests/ptvsd/server/test_run.py @@ -11,13 +11,13 @@ import ptvsd from ptvsd.common import messaging from tests import debug, test_data -from tests.debug import start_methods +from tests.debug import runners, targets from tests.patterns import some -from tests.timeline 
import Event -@pytest.mark.parametrize("run_as", ["program", "module", "code"]) -def test_run(pyfile, start_method, run_as): +@pytest.mark.parametrize("run", runners.all) +@pytest.mark.parametrize("target", targets.all) +def test_run(pyfile, target, run): @pyfile def code_to_debug(): from debug_me import backchannel @@ -26,17 +26,17 @@ def code_to_debug(): print("begin") backchannel.send(path.abspath(sys.modules["ptvsd"].__file__)) - backchannel.wait_for("continue") + assert backchannel.receive() == "continue" print("end") - with debug.Session(start_method, backchannel=True) as session: - backchannel = session.backchannel - session.configure(run_as, code_to_debug) - session.start_debugging() + with debug.Session() as session: + backchannel = session.open_backchannel() + with run(session, target(code_to_debug)): + pass expected_ptvsd_path = path.abspath(ptvsd.__file__) - backchannel.expect( - some.str.matching(re.escape(expected_ptvsd_path) + r"(c|o)?") + assert backchannel.receive() == some.str.matching( + re.escape(expected_ptvsd_path) + r"(c|o)?" ) backchannel.send("continue") @@ -44,15 +44,21 @@ def code_to_debug(): session.proceed() -def test_run_submodule(): - with debug.Session(start_methods.Launch, backchannel=True) as session: - session.configure("module", "pkg1.sub", cwd=test_data / "testpkgs") - session.start_debugging() - session.backchannel.expect("ok") +@pytest.mark.parametrize("run", runners.all_launch) +def test_run_submodule(run): + with debug.Session() as session: + session.config["cwd"] = test_data / "testpkgs" + backchannel = session.open_backchannel() + with run(session, targets.Module(name="pkg1.sub")): + pass -@pytest.mark.parametrize("run_as", ["program", "module", "code"]) -def test_nodebug(pyfile, run_as): + assert backchannel.receive() == "ok" + + +@pytest.mark.parametrize("run", runners.all_launch) +@pytest.mark.parametrize("target", targets.all) +def test_nodebug(pyfile, run, target): @pyfile def code_to_debug(): from debug_me import backchannel @@ -60,20 +66,18 @@ def code_to_debug(): backchannel.receive() # @ bp1 print("ok") # @ bp2 - with debug.Session(start_methods.Launch, backchannel=True) as session: - backchannel = session.backchannel - session.configure( - run_as, code_to_debug, noDebug=True, console="internalConsole" - ) + with debug.Session() as session: + session.config["noDebug"] = True + + backchannel = session.open_backchannel() + run(session, target(code_to_debug)) with pytest.raises(messaging.MessageHandlingError): session.set_breakpoints(code_to_debug, all) - session.start_debugging() backchannel.send(None) # Breakpoint shouldn't be hit. 
+ pass - session.expect_realized( - Event("output", some.dict.containing({"category": "stdout", "output": "ok"})) - ) + assert "ok" in session.output("stdout") diff --git a/tests/ptvsd/server/test_start_stop.py b/tests/ptvsd/server/test_start_stop.py index 70e6353b1..245cee86a 100644 --- a/tests/ptvsd/server/test_start_stop.py +++ b/tests/ptvsd/server/test_start_stop.py @@ -11,7 +11,7 @@ from ptvsd.common import log from tests import debug -from tests.debug import start_methods +from tests.debug import runners from tests.patterns import some @@ -44,7 +44,7 @@ def code_to_debug(): ptvsd.break_into_debugger() print() # line on which it'll actually break - with debug.Session(start_methods.Launch) as session: + with debug.Session(runners.launch) as session: session.configure(run_as, code_to_debug, waitOnNormalExit=True) session.start_debugging() @@ -68,7 +68,7 @@ def code_to_debug(): print() # line on which it'll actually break sys.exit(42) - with debug.Session(start_methods.Launch) as session: + with debug.Session(runners.launch) as session: session.expected_exit_code = 42 session.configure(run_as, code_to_debug, waitOnAbnormalExit=True) session.start_debugging() @@ -79,8 +79,7 @@ def code_to_debug(): wait_and_press_key(session) -@pytest.mark.parametrize("start_method", [start_methods.Launch]) -def test_exit_normally_with_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as): +def test_exit_normally_with_wait_on_abnormal_exit_enabled(pyfile, run_as): @pyfile def code_to_debug(): from debug_me import ptvsd @@ -88,7 +87,7 @@ def code_to_debug(): ptvsd.break_into_debugger() print() - with debug.Session(start_method) as session: + with debug.Session(runners.launch) as session: session.configure(run_as, code_to_debug, waitOnAbnormalExit=True) session.start_debugging() diff --git a/tests/ptvsd/server/test_stop_on_entry.py b/tests/ptvsd/server/test_stop_on_entry.py index fac010162..6bbb67ec9 100644 --- a/tests/ptvsd/server/test_stop_on_entry.py +++ b/tests/ptvsd/server/test_stop_on_entry.py @@ -7,7 +7,7 @@ import pytest from tests import debug -from tests.debug import start_methods +from tests.debug import runners from tests.patterns import some @@ -19,7 +19,7 @@ def code_to_debug(): backchannel.send("done") - with debug.Session(start_methods.Launch, backchannel=True) as session: + with debug.Session(runners.launch, backchannel=True) as session: backchannel = session.backchannel session.configure( run_as, code_to_debug, diff --git a/tests/pytest_fixtures.py b/tests/pytest_fixtures.py index 34fad7e7a..ef287fd9e 100644 --- a/tests/pytest_fixtures.py +++ b/tests/pytest_fixtures.py @@ -13,38 +13,65 @@ import threading import types -from ptvsd.common import compat, timestamp +from ptvsd.common import compat, fmt, log, options, timestamp from tests import code, pydevd_log -from tests.debug import start_methods - -__all__ = ['run_as', 'start_method', 'with_pydevd_log', 'daemon', 'pyfile'] +from tests.debug import runners, session, targets # Set up the test matrix for various code types and attach methods. Most tests will # use both run_as and start_method, so the matrix is a cross product of them. 
-RUN_AS = ['program'] -START_METHODS = [start_methods.Launch, start_methods.AttachSocketImport, start_methods.AttachSocketCmdLine] - -if os.environ.get('PTVSD_SIMPLE_TESTS', '').lower() not in ('1', 'true'): - RUN_AS += ['module'] +if int(os.environ.get("PTVSD_SIMPLE_TESTS", "0")): + TARGETS = [targets.Program] + RUNNERS = [runners.launch, runners.attach_by_socket["cli"]] +else: + TARGETS = targets.all_named + RUNNERS = [ + runners.launch, + runners.attach_by_socket["api"], + runners.attach_by_socket["cli"], + ] -@pytest.fixture(params=RUN_AS) -def run_as(request): +@pytest.fixture(params=TARGETS) +def target(request): return request.param -@pytest.fixture(params=START_METHODS) -def start_method(request): +@pytest.fixture(params=RUNNERS) +def run(request): return request.param @pytest.fixture(autouse=True) -def reset_timestamp(request): +def test_wrapper(request, long_tmpdir): timestamp.reset() - print("\n") # make sure logs start on a new line - yield + + session.Session.tmpdir = long_tmpdir + + original_log_dir = None + try: + if options.log_dir is not None: + original_log_dir = options.log_dir + log_subdir = request.node.name + for ch in r"\/:?*|<>": + log_subdir = log_subdir.replace(ch, fmt("&#{0};", ord(ch))) + options.log_dir += "/" + log_subdir + try: + py.path.local(options.log_dir).remove() + except Exception: + pass + + print("\n") # make sure on-screen logs start on a new line + with log.to_file(prefix="tests"): + log.info("Test {0} started.", request.node.name) + try: + yield + finally: + log.info("Test {0} completed.", request.node.name) + finally: + if original_log_dir is not None: + options.log_dir = original_log_dir @pytest.fixture(autouse=True) @@ -52,8 +79,8 @@ def with_pydevd_log(request, tmpdir): """Enables pydevd logging during the test run, and dumps the log if the test fails. """ - prefix = 'pydevd_debug_file-{0}'.format(os.getpid()) - filename = tempfile.mktemp(suffix='.log', prefix=prefix, dir=str(tmpdir)) + prefix = "pydevd_debug_file-{0}".format(os.getpid()) + filename = tempfile.mktemp(suffix=".log", prefix=prefix, dir=str(tmpdir)) with pydevd_log.enabled(filename): yield @@ -75,7 +102,7 @@ def daemon(request): daemons = [] - def factory(func, name_suffix=''): + def factory(func, name_suffix=""): name = func.__name__ + name_suffix thread = threading.Thread(target=func, name=name) thread.daemon = True @@ -95,10 +122,13 @@ def factory(func, name_suffix=''): assert not thread.is_alive() -if platform.system() != 'Windows': +if platform.system() != "Windows": + @pytest.fixture def long_tmpdir(request, tmpdir): return tmpdir + + else: import ctypes @@ -160,14 +190,14 @@ def factory(source): def_lineno = 0 for line in source: line = line.strip() - if line.startswith('def') and line.endswith(':'): + if line.startswith("def") and line.endswith(":"): break def_lineno += 1 else: - raise ValueError('Failed to locate function header.') + raise ValueError("Failed to locate function header.") # Remove everything up to and including "def". - source = source[def_lineno + 1:] + source = source[def_lineno + 1 :] assert source # Now we need to adjust indentation. Compute how much the first line of @@ -176,11 +206,11 @@ def factory(source): # with, so just replace them with a simple newline. line = source[0] indent = len(line) - len(line.lstrip()) - source = [l[indent:] if l.strip() else '\n' for l in source] - source = ''.join(source) + source = [l[indent:] if l.strip() else "\n" for l in source] + source = "".join(source) # Write it to file. 
- tmpfile = long_tmpdir / (name + '.py') + tmpfile = long_tmpdir / (name + ".py") tmpfile.strpath = compat.filename(tmpfile.strpath) assert not tmpfile.check() tmpfile.write(source) diff --git a/tests/pytest_hooks.py b/tests/pytest_hooks.py index d08f812de..e0730bcfc 100644 --- a/tests/pytest_hooks.py +++ b/tests/pytest_hooks.py @@ -5,35 +5,28 @@ from __future__ import absolute_import, print_function, unicode_literals import os +import platform import pytest import pytest_timeout -from ptvsd.common import fmt, log -from tests import debug, pydevd_log +from ptvsd.common import fmt, log, options +from tests import pydevd_log def pytest_addoption(parser): parser.addoption( "--ptvsd-logs", action="store_true", - help="Write ptvsd logs to {rootdir}/tests/_logs/", - ) - parser.addoption( - "--pydevd-logs", - action="store_true", - help="Write pydevd logs to {rootdir}/tests/_logs/", + help="Write ptvsd and pydevd logs under {rootdir}/tests/_logs/", ) def pytest_configure(config): - log_dir = config.rootdir / "tests" / "_logs" - if True or config.option.ptvsd_logs: - log.info("ptvsd logs will be in {0}", log_dir) - debug.PTVSD_ENV["PTVSD_LOG_DIR"] = str(log_dir) - if config.option.pydevd_logs: - log.info("pydevd logs will be in {0}", log_dir) - debug.PTVSD_ENV["PYDEVD_DEBUG"] = "True" - debug.PTVSD_ENV["PYDEVD_DEBUG_FILE"] = str(log_dir / "pydevd.log") + if config.option.ptvsd_logs: + options.log_dir = ( + config.rootdir / "tests" / "_logs" / platform.python_version() + ).strpath + log.info("ptvsd and pydevd logs will be under {0}", options.log_dir) def pytest_report_header(config): @@ -53,6 +46,10 @@ def pytest_runtest_makereport(item, call): setattr(item, result.when + "_result", result) +def pytest_make_parametrize_id(config, val): + return getattr(val, "pytest_id", None) + + # If a test times out and pytest tries to print the stacks of where it was hanging, # we want to print the pydevd log as well. This is not a normal pytest hook - we # just detour pytest_timeout.dump_stacks directly. 
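For orientation, here is a minimal sketch of what a test converted to the new fixtures looks like. It uses only names that appear in the hunks above (runners, targets, debug.Session, open_backchannel, and the parametrized run/target fixtures); the test itself is hypothetical and not part of this patch.

    import pytest

    from tests import debug
    from tests.debug import runners, targets


    @pytest.mark.parametrize("run", runners.all_launch)
    @pytest.mark.parametrize("target", targets.all)
    def test_example(pyfile, target, run):
        @pyfile
        def code_to_debug():
            from debug_me import backchannel

            backchannel.send("ok")

        with debug.Session() as session:
            backchannel = session.open_backchannel()
            # The runner spawns the debuggee for the given target; any extra
            # configuration requests would go inside this block.
            with run(session, target(code_to_debug)):
                pass
            assert backchannel.receive() == "ok"

Test IDs for the parametrized runners and targets come from their pytest_id attribute, via the pytest_make_parametrize_id hook added above.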
diff --git a/tests/test_data/attach/attach1.py b/tests/test_data/attach/attach1.py deleted file mode 100644 index ff2b35509..000000000 --- a/tests/test_data/attach/attach1.py +++ /dev/null @@ -1,29 +0,0 @@ -from debug_me import backchannel, ptvsd, scratchpad - -import os -import time - -ptvsd.enable_attach(("localhost", int(os.environ["ATTACH1_TEST_PORT"]))) - -if int(os.environ["ATTACH1_WAIT_FOR_ATTACH"]): - backchannel.send("wait_for_attach") - ptvsd.wait_for_attach() - -if int(os.environ["ATTACH1_IS_ATTACHED"]): - backchannel.send("is_attached") - while not ptvsd.is_attached(): - print("looping until is_attached") - time.sleep(0.1) - -if int(os.environ["ATTACH1_BREAK_INTO_DEBUGGER"]): - backchannel.send("break_into_debugger?") - assert backchannel.receive() == "proceed" - ptvsd.break_into_debugger() - print("break") # @break_into_debugger -else: - scratchpad["paused"] = False - backchannel.send("loop?") - assert backchannel.receive() == "proceed" - while not scratchpad["paused"]: - print("looping until paused") - time.sleep(0.1) diff --git a/tests/test_data/django1/app.py b/tests/test_data/django1/app.py index dbc2d6d15..5a5c30e37 100644 --- a/tests/test_data/django1/app.py +++ b/tests/test_data/django1/app.py @@ -1,3 +1,5 @@ +import debug_me # noqa + import os import signal import sys diff --git a/tests/timeline.py b/tests/timeline.py index bf007222c..e907c6454 100644 --- a/tests/timeline.py +++ b/tests/timeline.py @@ -1055,6 +1055,9 @@ class RequestOccurrence(MessageOccurrence): def __init__(self, message): assert isinstance(message, messaging.Request) super(RequestOccurrence, self).__init__(message) + self.response = None + if isinstance(message, messaging.OutgoingRequest): + self.on_response = message.on_response @property def command(self): @@ -1094,6 +1097,7 @@ def __init__(self, request_occ, message): # Assign request first for the benefit of self._key. self.request = request_occ + request_occ.response = self super(ResponseOccurrence, self).__init__(message) @property diff --git a/tests/watchdog/__init__.py b/tests/watchdog/__init__.py index f71bb5def..421c0aa85 100644 --- a/tests/watchdog/__init__.py +++ b/tests/watchdog/__init__.py @@ -62,8 +62,6 @@ def start(): def _dump_worker_log(command, problem, exc_info=None): - assert _worker_log_filename - reason = fmt("{0}.{1}() {2}", _name, command, problem) if _worker_log_filename is None: reason += ", but there is no log." 
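The timeline.py change above gives every request occurrence a back-link to its response. A hedged sketch of the resulting invariant, using only the attributes shown in that hunk (request, response); the helper function itself is hypothetical:

    def assert_response_linked(request_occ):
        # Until a matching ResponseOccurrence is recorded, the new attribute
        # is simply None.
        if request_occ.response is None:
            return
        # ResponseOccurrence.__init__ assigns itself back to its request, so
        # the pair can be navigated in both directions.
        assert request_occ.response.request is request_occ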
diff --git a/tests/watchdog/worker.py b/tests/watchdog/worker.py index 6eb8d7652..b727e5a00 100644 --- a/tests/watchdog/worker.py +++ b/tests/watchdog/worker.py @@ -33,13 +33,12 @@ def main(tests_pid): # log.stderr_levels |= {"info"} log.timestamp_format = "06.3f" - log.filename_prefix = "watchdog" - log.to_file() + log_file = log.to_file(prefix="tests.watchdog") stream = messaging.JsonIOStream.from_stdio(fmt("tests-{0}", tests_pid)) - log.info("Spawned watchdog-{0} for tests-{0}", tests_pid) + log.info("Spawned WatchDog-{0} for tests-{0}", tests_pid) tests_process = psutil.Process(tests_pid) - stream.write_json(["watchdog", log.filename()]) + stream.write_json(["watchdog", log_file.filename]) spawned_processes = {} # pid -> ProcessInfo try: @@ -62,7 +61,7 @@ def main(tests_pid): pid = int(pid) log.info( - "watchdog-{0} registering spawned process {1} (pid={2})", + "WatchDog-{0} registering spawned process {1} (pid={2})", tests_pid, name, pid, @@ -73,7 +72,7 @@ def main(tests_pid): pass else: log.warning( - "watchdog-{0} already tracks a process with pid={1}: {2}", + "WatchDog-{0} already tracks a process with pid={1}: {2}", tests_pid, pid, old_name, @@ -85,7 +84,7 @@ def main(tests_pid): pid = int(pid) log.info( - "watchdog-{0} unregistering spawned process {1} (pid={2})", + "WatchDog-{0} unregistering spawned process {1} (pid={2})", tests_pid, name, pid, @@ -150,7 +149,7 @@ def main(tests_pid): for proc in leftover_processes: log.warning( - "watchdog-{0} killing orphaned test child process (pid={1})", + "WatchDog-{0} killing orphaned test child process (pid={1})", tests_pid, proc.pid, ) @@ -160,7 +159,7 @@ def main(tests_pid): # gcore will automatically add pid to the filename core_file = os.path.join(tempfile.gettempdir(), "ptvsd_core") gcore_cmd = fmt("gcore -o {0} {1}", core_file, proc.pid) - log.warning("watchdog-{0}: {1}", tests_pid, gcore_cmd) + log.warning("WatchDog-{0}: {1}", tests_pid, gcore_cmd) os.system(gcore_cmd) except Exception: log.exception() @@ -172,7 +171,7 @@ def main(tests_pid): except Exception: log.exception() - log.info("watchdog-{0} exiting", tests_pid) + log.info("WatchDog-{0} exiting", tests_pid) if __name__ == "__main__":
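Taken together with the adapter changes earlier in the patch, the test and watchdog hunks rely on the reworked ptvsd.common.log file API: to_file(prefix=...) returns a handle exposing the generated file name, and the same call is also used as a context manager. A small sketch that restates just those two usages from the hunks above; nothing beyond what they show is assumed:

    from ptvsd.common import log

    # Standalone use, as in tests/watchdog/worker.py - keep the handle so the
    # log file name can be reported back to the test process.
    log_file = log.to_file(prefix="tests.watchdog")
    log.info("watchdog log: {0}", log_file.filename)

    # Scoped use, as in the test_wrapper fixture - logging to the file lasts
    # only for the duration of the block.
    with log.to_file(prefix="tests"):
        log.info("Test started.")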