diff --git a/.travis.yml b/.travis.yml
index bc262a9d..2beeda82 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,6 +10,8 @@ matrix:
       env: TOXENV=py38
     - python: 3.8
       env: TOXENV=flake8
+    - python: 3.8
+      env: TOXENV=mypy
     - python: 3.8
       env: TOXENV=dist
     - python: 3.8
diff --git a/MANIFEST.in b/MANIFEST.in
index fcec0bc1..e3e313a9 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -5,6 +5,7 @@ include requirements-dev.txt
 include *.md
 include .bumpversion.cfg
 include tox.ini
+include mypy.ini
 include pyproject.toml

 # Code and test files
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..d5ebf802
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,24 @@
+# Global options:
+
+[mypy]
+python_version = 3.6
+
+# Per-module options:
+
+[mypy-ipython_genutils.*]
+ignore_missing_imports = True
+
+[mypy-nbformat.*]
+ignore_missing_imports = True
+
+[mypy-nest_asyncio.*]
+ignore_missing_imports = True
+
+[mypy-async_generator.*]
+ignore_missing_imports = True
+
+[mypy-traitlets.*]
+ignore_missing_imports = True
+
+[mypy-jupyter_client.*]
+ignore_missing_imports = True
diff --git a/nbclient/__init__.py b/nbclient/__init__.py
index 04ca111b..6110e8f6 100644
--- a/nbclient/__init__.py
+++ b/nbclient/__init__.py
@@ -4,7 +4,7 @@
 from ._version import version as __version__  # noqa: F401


-def _cleanup():
+def _cleanup() -> None:
     pass
diff --git a/nbclient/client.py b/nbclient/client.py
index 3e6bd711..8d383ae0 100644
--- a/nbclient/client.py
+++ b/nbclient/client.py
@@ -11,11 +11,15 @@
 from time import monotonic
 from queue import Empty
 import asyncio
+import typing as t

 from traitlets.config.configurable import LoggingConfigurable
 from traitlets import List, Unicode, Bool, Enum, Any, Type, Dict, Integer, default

+from nbformat import NotebookNode
 from nbformat.v4 import output_from_msg
+from jupyter_client import KernelManager
+from jupyter_client.client import KernelClient

 from .exceptions import (
     CellControlSignal,
@@ -28,7 +32,7 @@ from .output_widget import OutputWidget


-def timestamp():
+def timestamp() -> str:
     return datetime.datetime.utcnow().isoformat() + 'Z'


@@ -37,7 +41,7 @@ class NotebookClient(LoggingConfigurable):
     Encompasses a Client for executing cells in a notebook
     """

-    timeout = Integer(
+    timeout: int = Integer(
         None,
         allow_none=True,
         help=dedent(
@@ -51,7 +55,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    timeout_func = Any(
+    timeout_func: t.Any = Any(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -69,7 +73,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    interrupt_on_timeout = Bool(
+    interrupt_on_timeout: bool = Bool(
         False,
         help=dedent(
             """
@@ -80,7 +84,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    startup_timeout = Integer(
+    startup_timeout: int = Integer(
         60,
         help=dedent(
             """
@@ -91,7 +95,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    allow_errors = Bool(
+    allow_errors: bool = Bool(
         False,
         help=dedent(
             """
@@ -105,7 +109,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    force_raise_errors = Bool(
+    force_raise_errors: bool = Bool(
         False,
         help=dedent(
             """
@@ -122,9 +126,9 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    extra_arguments = List(Unicode()).tag(config=True)
+    extra_arguments: t.List = List(Unicode()).tag(config=True)

-    kernel_name = Unicode(
+    kernel_name: str = Unicode(
         '',
         help=dedent(
             """
@@ -134,7 +138,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    raise_on_iopub_timeout = Bool(
+    raise_on_iopub_timeout: bool = Bool(
         False,
         help=dedent(
             """
@@ -149,7 +153,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    store_widget_state = Bool(
+    store_widget_state: bool = Bool(
         True,
         help=dedent(
             """
@@ -159,7 +163,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    record_timing = Bool(
+    record_timing: bool = Bool(
         True,
         help=dedent(
             """
@@ -169,7 +173,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    iopub_timeout = Integer(
+    iopub_timeout: int = Integer(
         4,
         allow_none=False,
         help=dedent(
@@ -182,7 +186,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    shell_timeout_interval = Integer(
+    shell_timeout_interval: int = Integer(
         5,
         allow_none=False,
         help=dedent(
@@ -207,7 +211,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)

-    ipython_hist_file = Unicode(
+    ipython_hist_file: str = Unicode(
         default_value=':memory:',
         help="""Path to file to use for SQLite history database for an IPython kernel.

@@ -221,16 +225,16 @@ class NotebookClient(LoggingConfigurable):
         """,
     ).tag(config=True)

-    kernel_manager_class = Type(config=True, help='The kernel manager class to use.')
+    kernel_manager_class: KernelManager = Type(config=True, help='The kernel manager class to use.')

     @default('kernel_manager_class')
-    def _kernel_manager_class_default(self):
+    def _kernel_manager_class_default(self) -> KernelManager:
         """Use a dynamic default to avoid importing jupyter_client at startup"""
         from jupyter_client import AsyncKernelManager
         return AsyncKernelManager

-    _display_id_map = Dict(
+    _display_id_map: t.Dict[str, t.Dict] = Dict(
         help=dedent(
             """
             mapping of locations of outputs with a given display_id
@@ -245,7 +249,7 @@ def _kernel_manager_class_default(self):
         )
     )

-    display_data_priority = List(
+    display_data_priority: t.List = List(
         [
             'text/html',
             'application/pdf',
@@ -263,7 +267,7 @@ def _kernel_manager_class_default(self):
         """,
     ).tag(config=True)

-    resources = Dict(
+    resources: t.Dict = Dict(
         help=dedent(
             """
             Additional resources used in the conversion process. For example,
@@ -273,7 +277,11 @@ def _kernel_manager_class_default(self):
         )
     )

-    def __init__(self, nb, km=None, **kw):
+    def __init__(
+            self,
+            nb: NotebookNode,
+            km: t.Optional[KernelManager] = None,
+            **kw) -> None:
         """Initializes the execution manager.

         Parameters
@@ -285,33 +293,34 @@ def __init__(self, nb, km=None, **kw):
             be created.
         """
         super().__init__(**kw)
-        self.nb = nb
-        self.km = km
+        self.nb: NotebookNode = nb
+        self.km: t.Optional[KernelManager] = km
+        self.kc: t.Optional[KernelClient] = None
         self.reset_execution_trackers()
-        self.widget_registry = {
+        self.widget_registry: t.Dict[str, t.Dict] = {
             '@jupyter-widgets/output': {
                 'OutputModel': OutputWidget
             }
         }
         # comm_open_handlers should return an object with a .handle_msg(msg) method or None
-        self.comm_open_handlers = {
+        self.comm_open_handlers: t.Dict[str, t.Any] = {
             'jupyter.widget': self.on_comm_open_jupyter_widget
         }

-    def reset_execution_trackers(self):
+    def reset_execution_trackers(self) -> None:
         """Resets any per-execution trackers.
         """
         self.code_cells_executed = 0
         self._display_id_map = {}
-        self.widget_state = {}
-        self.widget_buffers = {}
+        self.widget_state: t.Dict[str, t.Dict] = {}
+        self.widget_buffers: t.Dict[str, t.List[t.Dict[str, str]]] = {}
         # maps to list of hooks, where the last is used, this is used
         # to support nested use of output widgets.
-        self.output_hook_stack = collections.defaultdict(list)
+        self.output_hook_stack: t.Any = collections.defaultdict(list)
         # our front-end mimicing Output widgets
-        self.comm_objects = {}
+        self.comm_objects: t.Dict[str, t.Any] = {}

-    def start_kernel_manager(self):
+    def start_kernel_manager(self) -> KernelManager:
         """Creates a new kernel manager.

         Returns
@@ -331,7 +340,8 @@ def start_kernel_manager(self):
             self.km.client_class = 'jupyter_client.asynchronous.AsyncKernelClient'
         return self.km

-    async def _async_cleanup_kernel(self):
+    async def _async_cleanup_kernel(self) -> None:
+        assert self.km is not None
         now = self.shutdown_kernel == "immediate"
         try:
             # Queue the manager to kill the process, and recover gracefully if it's already dead.
@@ -344,13 +354,13 @@ async def _async_cleanup_kernel(self):
         finally:
             # Remove any state left over even if we failed to stop the kernel
             await ensure_async(self.km.cleanup())
-            if getattr(self, "kc"):
+            if getattr(self, "kc") and self.kc is not None:
                 await ensure_async(self.kc.stop_channels())
                 self.kc = None

     _cleanup_kernel = run_sync(_async_cleanup_kernel)

-    async def async_start_new_kernel_client(self, **kwargs):
+    async def async_start_new_kernel_client(self, **kwargs) -> t.Tuple[KernelClient, str]:
         """Creates a new kernel client.

         Parameters
@@ -367,6 +377,7 @@ async def async_start_new_kernel_client(self, **kwargs):
         kernel_id : string-ized version 4 uuid
             The id of the started kernel.
         """
+        assert self.km is not None
         resource_path = self.resources.get('metadata', {}).get('path') or None
         if resource_path and 'cwd' not in kwargs:
             kwargs["cwd"] = resource_path
@@ -406,7 +417,7 @@ async def async_start_new_kernel_client(self, **kwargs):
     start_new_kernel_client = run_sync(async_start_new_kernel_client)

     @contextmanager
-    def setup_kernel(self, **kwargs):
+    def setup_kernel(self, **kwargs) -> t.Generator:
         """
         Context manager for setting up the kernel to execute a notebook.

@@ -419,7 +430,7 @@ def setup_kernel(self, **kwargs):
         # Can't use run_until_complete on an asynccontextmanager function :(
         if self.km is None:
-            self.start_kernel_manager()
+            self.km = self.start_kernel_manager()

         if not self.km.has_kernel:
             self.start_new_kernel_client(**kwargs)
@@ -430,7 +441,7 @@ def setup_kernel(self, **kwargs):
             self._cleanup_kernel()

     @asynccontextmanager
-    async def async_setup_kernel(self, **kwargs):
+    async def async_setup_kernel(self, **kwargs) -> t.AsyncGenerator:
         """
         Context manager for setting up the kernel to execute a notebook.

@@ -443,7 +454,7 @@ async def async_setup_kernel(self, **kwargs):
         """
         cleanup_kc = kwargs.pop('cleanup_kc', True)
         if self.km is None:
-            self.start_kernel_manager()
+            self.km = self.start_kernel_manager()

         # self._cleanup_kernel uses run_async, which ensures the ioloop is running again.
         # This is necessary as the ioloop has stopped once atexit fires.
@@ -477,7 +488,10 @@ def on_signal():
             except (NotImplementedError, RuntimeError):
                 pass

-    async def async_execute(self, reset_kc=False, **kwargs):
+    async def async_execute(
+            self,
+            reset_kc: bool = False,
+            **kwargs) -> NotebookNode:
         """
         Executes each code cell.
@@ -501,6 +515,7 @@ async def async_execute(self, reset_kc=False, **kwargs):
         self.reset_execution_trackers()

         async with self.async_setup_kernel(**kwargs):
+            assert self.kc is not None
             self.log.info("Executing notebook with kernel: %s" % self.kernel_name)
             for index, cell in enumerate(self.nb.cells):
                 # Ignore `'execution_count' in content` as it's always 1
@@ -510,14 +525,15 @@ async def async_execute(self, reset_kc=False, **kwargs):
                 )
             msg_id = await ensure_async(self.kc.kernel_info())
             info_msg = await self.async_wait_for_reply(msg_id)
-            self.nb.metadata['language_info'] = info_msg['content']['language_info']
+            if info_msg is not None:
+                self.nb.metadata['language_info'] = info_msg['content']['language_info']
             self.set_widgets_metadata()

         return self.nb

     execute = run_sync(async_execute)

-    def set_widgets_metadata(self):
+    def set_widgets_metadata(self) -> None:
         if self.widget_state:
             self.nb.metadata.widgets = {
                 'application/vnd.jupyter.widget-state+json': {
@@ -537,7 +553,10 @@ def set_widgets_metadata(self):
                 if buffers:
                     widget['buffers'] = buffers

-    def _update_display_id(self, display_id, msg):
+    def _update_display_id(
+            self,
+            display_id: str,
+            msg: t.Dict) -> None:
         """Update outputs with a given display_id"""
         if display_id not in self._display_id_map:
             self.log.debug("display id %r not in %s", display_id, self._display_id_map)
@@ -559,12 +578,21 @@ def _update_display_id(self, display_id, msg):
                 outputs[output_idx]['data'] = out['data']
                 outputs[output_idx]['metadata'] = out['metadata']

-    async def _async_poll_for_reply(self, msg_id, cell, timeout, task_poll_output_msg):
+    async def _async_poll_for_reply(
+            self,
+            msg_id: str,
+            cell: NotebookNode,
+            timeout: t.Optional[int],
+            task_poll_output_msg: asyncio.Future) -> t.Dict:
+
+        assert self.kc is not None
+        new_timeout: t.Optional[float] = None
         if timeout is not None:
             deadline = monotonic() + timeout
+            new_timeout = float(timeout)
         while True:
             try:
-                msg = await ensure_async(self.kc.shell_channel.get_msg(timeout=timeout))
+                msg = await ensure_async(self.kc.shell_channel.get_msg(timeout=new_timeout))
                 if msg['parent_header'].get('msg_id') == msg_id:
                     if self.record_timing:
                         cell['metadata']['execution']['shell.execute_reply'] = timestamp()
@@ -579,14 +607,21 @@ async def _async_poll_for_reply(self, msg_id, cell, timeout, task_poll_output_ms
                             self.log.warning("Timeout waiting for IOPub output")
                     return msg
                 else:
-                    if timeout is not None:
-                        timeout = max(0, deadline - monotonic())
+                    if new_timeout is not None:
+                        new_timeout = max(0, deadline - monotonic())
             except Empty:
                 # received no message, check if kernel is still alive
+                assert timeout is not None
                 await self._async_check_alive()
                 await self._async_handle_timeout(timeout, cell)

-    async def _async_poll_output_msg(self, parent_msg_id, cell, cell_index):
+    async def _async_poll_output_msg(
+            self,
+            parent_msg_id: str,
+            cell: NotebookNode,
+            cell_index: int) -> None:
+
+        assert self.kc is not None
         while True:
             msg = await ensure_async(self.kc.iopub_channel.get_msg(timeout=None))
             if msg['parent_header'].get('msg_id') == parent_msg_id:
@@ -596,7 +631,7 @@ async def _async_poll_output_msg(self, parent_msg_id, cell, cell_index):
                 except CellExecutionComplete:
                     return

-    def _get_timeout(self, cell):
+    def _get_timeout(self, cell: t.Optional[NotebookNode]) -> int:
         if self.timeout_func is not None and cell is not None:
             timeout = self.timeout_func(cell)
         else:
@@ -607,22 +642,33 @@ def _get_timeout(self, cell):

         return timeout

-    async def _async_handle_timeout(self, timeout, cell=None):
+    async def _async_handle_timeout(
+            self,
+            timeout: int,
+            cell: t.Optional[NotebookNode] = None) -> None:
+
         self.log.error("Timeout waiting for execute reply (%is)." % timeout)
         if self.interrupt_on_timeout:
             self.log.error("Interrupting kernel")
+            assert self.km is not None
             await ensure_async(self.km.interrupt_kernel())
         else:
             raise CellTimeoutError.error_from_timeout_and_cell(
                 "Cell execution timed out", timeout, cell
             )

-    async def _async_check_alive(self):
+    async def _async_check_alive(self) -> None:
+        assert self.kc is not None
         if not await ensure_async(self.kc.is_alive()):
             self.log.error("Kernel died while waiting for execute reply.")
             raise DeadKernelError("Kernel died")

-    async def async_wait_for_reply(self, msg_id, cell=None):
+    async def async_wait_for_reply(
+            self,
+            msg_id: str,
+            cell: t.Optional[NotebookNode] = None) -> t.Optional[t.Dict]:
+
+        assert self.kc is not None
         # wait for finish, with timeout
         timeout = self._get_timeout(cell)
         cummulative_time = 0
@@ -642,26 +688,22 @@ async def async_wait_for_reply(self, msg_id, cell=None):
             else:
                 if msg['parent_header'].get('msg_id') == msg_id:
                     return msg
+        return None

     wait_for_reply = run_sync(async_wait_for_reply)
     # Backwards compatability naming for papermill
     _wait_for_reply = wait_for_reply

-    def _timeout_with_deadline(self, timeout, deadline):
-        if deadline is not None and deadline - monotonic() < timeout:
-            timeout = deadline - monotonic()
-
-        if timeout < 0:
-            timeout = 0
-
-        return timeout
-
-    def _passed_deadline(self, deadline):
+    def _passed_deadline(self, deadline: int) -> bool:
         if deadline is not None and deadline - monotonic() <= 0:
             return True
         return False

-    def _check_raise_for_error(self, cell, exec_reply):
+    def _check_raise_for_error(
+            self,
+            cell: NotebookNode,
+            exec_reply: t.Optional[t.Dict]) -> None:
+
         cell_allows_errors = self.allow_errors or "raises-exception" in cell.metadata.get(
             "tags", []
         )
@@ -670,7 +712,7 @@ def _check_raise_for_error(self, cell, exec_reply):
         if (exec_reply is not None) and exec_reply['content']['status'] == 'error':
             raise CellExecutionError.from_cell_and_msg(cell, exec_reply['content'])

-    async def async_execute_cell(self, cell, cell_index, execution_count=None, store_history=True):
+    async def async_execute_cell(
+            self,
+            cell: NotebookNode,
+            cell_index: int,
+            execution_count: t.Optional[int] = None,
+            store_history: bool = True) -> NotebookNode:
         """
         Executes a single code cell.

@@ -704,6 +751,7 @@ async def async_execute_cell(self, cell, cell_index, execution_count=None, store
         cell : NotebookNode
             The cell which was just processed.
         """
+        assert self.kc is not None
         if cell.cell_type != 'code' or not cell.source.strip():
             self.log.debug("Skipping non-executing cell %s", cell_index)
             return cell
@@ -750,7 +798,11 @@ async def async_execute_cell(self, cell, cell_index, execution_count=None, store
     execute_cell = run_sync(async_execute_cell)

-    def process_message(self, msg, cell, cell_index):
+    def process_message(
+            self,
+            msg: t.Dict,
+            cell: NotebookNode,
+            cell_index: int) -> t.Optional[t.List]:
         """
         Processes a kernel message, updates cell state, and returns the
         resulting output object that was appended to cell.outputs.
@@ -810,8 +862,15 @@ def process_message(self, msg, cell, cell_index):
         elif msg_type not in ['execute_input', 'update_display_data']:
             # Assign output as our processed "result"
             return self.output(cell.outputs, msg, display_id, cell_index)
+        return None
+
+    def output(
+            self,
+            outs: t.List,
+            msg: t.Dict,
+            display_id: str,
+            cell_index: int) -> t.Optional[t.List]:

-    def output(self, outs, msg, display_id, cell_index):
         msg_type = msg['msg_type']
         parent_msg_id = msg['parent_header'].get('msg_id')

@@ -820,13 +879,13 @@ def output(self, outs, msg, display_id, cell_index):
             # default output behaviour (e.g. OutputWidget)
             hook = self.output_hook_stack[parent_msg_id][-1]
             hook.output(outs, msg, display_id, cell_index)
-            return
+            return None

         try:
             out = output_from_msg(msg)
         except ValueError:
             self.log.error("unhandled iopub msg: " + msg_type)
-            return
+            return None

         if self.clear_before_next_output:
             self.log.debug('Executing delayed clear_output')
@@ -845,7 +904,12 @@ def output(self, outs, msg, display_id, cell_index):

         return out

-    def clear_output(self, outs, msg, cell_index):
+    def clear_output(
+            self,
+            outs: t.List,
+            msg: t.Dict,
+            cell_index: int) -> None:
+
         content = msg['content']

         parent_msg_id = msg['parent_header'].get('msg_id')
@@ -864,12 +928,20 @@ def clear_output(self, outs, msg, cell_index):
             outs[:] = []
             self.clear_display_id_mapping(cell_index)

-    def clear_display_id_mapping(self, cell_index):
+    def clear_display_id_mapping(
+            self,
+            cell_index: int) -> None:
+
         for display_id, cell_map in self._display_id_map.items():
             if cell_index in cell_map:
                 cell_map[cell_index] = []

-    def handle_comm_msg(self, outs, msg, cell_index):
+    def handle_comm_msg(
+            self,
+            outs: t.List,
+            msg: t.Dict,
+            cell_index: int) -> None:
+
         content = msg['content']
         data = content['data']
         if self.store_widget_state and 'state' in data:  # ignore custom msg'es
@@ -894,7 +966,7 @@ def handle_comm_msg(self, outs, msg, cell_index):
             if comm_id in self.comm_objects:
                 self.comm_objects[comm_id].handle_msg(msg)

-    def _serialize_widget_state(self, state):
+    def _serialize_widget_state(self, state: t.Dict) -> t.Dict[str, t.Any]:
         """Serialize a widget state, following format in @jupyter-widgets/schema."""
         return {
             'model_name': state.get('_model_name'),
@@ -903,7 +975,7 @@ def _serialize_widget_state(self, state):
             'state': state,
         }

-    def _get_buffer_data(self, msg):
+    def _get_buffer_data(self, msg: t.Dict) -> t.List[t.Dict[str, str]]:
         encoded_buffers = []
         paths = msg['content']['data']['buffer_paths']
         buffers = msg['buffers']
@@ -917,7 +989,10 @@ def _get_buffer_data(self, msg):
             )
         return encoded_buffers

-    def register_output_hook(self, msg_id, hook):
+    def register_output_hook(
+            self,
+            msg_id: str,
+            hook: OutputWidget) -> None:
         """Registers an override object that handles output/clear_output instead.
         Multiple hooks can be registered, where the last one will be used (stack based)
@@ -926,14 +1001,17 @@
         # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#registermessagehook
         self.output_hook_stack[msg_id].append(hook)

-    def remove_output_hook(self, msg_id, hook):
+    def remove_output_hook(
+            self,
+            msg_id: str,
+            hook: OutputWidget) -> None:
         """Unregisters an override object that handles output/clear_output instead"""
         # mimics
         # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#removemessagehook
         removed_hook = self.output_hook_stack[msg_id].pop()
         assert removed_hook == hook

-    def on_comm_open_jupyter_widget(self, msg):
+    def on_comm_open_jupyter_widget(self, msg: t.Dict):
         content = msg['content']
         data = content['data']
         state = data['state']
@@ -945,7 +1023,11 @@ def on_comm_open_jupyter_widget(self, msg):
         return widget_class(comm_id, state, self.kc, self)


-def execute(nb, cwd=None, km=None, **kwargs):
+def execute(
+        nb: NotebookNode,
+        cwd: t.Optional[str] = None,
+        km: t.Optional[KernelManager] = None,
+        **kwargs) -> NotebookClient:
     """Execute a notebook's code, updating outputs within the notebook object.

     This is a convenient wrapper around NotebookClient. It returns the
diff --git a/nbclient/exceptions.py b/nbclient/exceptions.py
index 679cf5db..07848999 100644
--- a/nbclient/exceptions.py
+++ b/nbclient/exceptions.py
@@ -1,3 +1,8 @@
+from typing import Dict
+
+from nbformat import NotebookNode
+
+
 class CellControlSignal(Exception):
     """
     A custom exception used to indicate that the exception is used for cell
@@ -13,7 +18,11 @@ class CellTimeoutError(TimeoutError, CellControlSignal):
     """

     @classmethod
-    def error_from_timeout_and_cell(cls, msg, timeout, cell):
+    def error_from_timeout_and_cell(
+            cls,
+            msg: str,
+            timeout: int,
+            cell: NotebookNode):
         if cell and cell.source:
             src_by_lines = cell.source.strip().split("\n")
             src = (
@@ -49,23 +58,30 @@ class CellExecutionError(CellControlSignal):
     failures gracefully.
     """

-    def __init__(self, traceback, ename, evalue):
+    def __init__(
+            self,
+            traceback: str,
+            ename: str,
+            evalue: str) -> None:
         super(CellExecutionError, self).__init__(traceback)
         self.traceback = traceback
         self.ename = ename
         self.evalue = evalue

-    def __str__(self):
+    def __str__(self) -> str:
         s = self.__unicode__()
         if not isinstance(s, str):
             s = s.encode('utf8', 'replace')
         return s

-    def __unicode__(self):
+    def __unicode__(self) -> str:
         return self.traceback

     @classmethod
-    def from_cell_and_msg(cls, cell, msg):
+    def from_cell_and_msg(
+            cls,
+            cell: NotebookNode,
+            msg: Dict):
         """Instantiate from a code cell object and a message contents
         (message is either execute_reply or error)
         """
@@ -82,7 +98,7 @@ def from_cell_and_msg(cls, cell, msg):
         )


-exec_err_msg = u"""\
+exec_err_msg: str = u"""\
 An error occurred while executing the following cell:
 ------------------
 {cell.source}
@@ -93,7 +109,7 @@ def from_cell_and_msg(cls, cell, msg):
 """


-timeout_err_msg = u"""\
+timeout_err_msg: str = u"""\
 A cell timed out while it was being executed, after {timeout} seconds.
 The message was: {msg}.
 Here is a preview of the cell contents:
diff --git a/nbclient/jsonutil.py b/nbclient/jsonutil.py
index a14865bf..7bd52ddc 100644
--- a/nbclient/jsonutil.py
+++ b/nbclient/jsonutil.py
@@ -11,6 +11,7 @@
 import types
 from datetime import datetime
 import numbers
+from typing import Dict

 from ipython_genutils import py3compat

@@ -51,7 +52,7 @@ PDF64 = b'JVBER'


-def encode_images(format_dict):
+def encode_images(format_dict: Dict) -> Dict[str, str]:
     """b64-encodes images in a displaypub format dict

     Perhaps this should be handled in json_clean itself?
diff --git a/nbclient/output_widget.py b/nbclient/output_widget.py
index 5e50c387..0f14afc8 100644
--- a/nbclient/output_widget.py
+++ b/nbclient/output_widget.py
@@ -1,19 +1,33 @@
 from .jsonutil import json_clean
 from nbformat.v4 import output_from_msg
+from typing import Dict, List, Any, Optional
+
+from jupyter_client.client import KernelClient


 class OutputWidget:
     """This class mimics a front end output widget"""
-    def __init__(self, comm_id, state, kernel_client, executor):
-        self.comm_id = comm_id
-        self.state = state
-        self.kernel_client = kernel_client
+    def __init__(
+            self,
+            comm_id: str,
+            state: Dict[str, Any],
+            kernel_client: KernelClient,
+            executor) -> None:
+
+        self.comm_id: str = comm_id
+        self.state: Dict[str, Any] = state
+        self.kernel_client: KernelClient = kernel_client
         self.executor = executor
-        self.topic = ('comm-%s' % self.comm_id).encode('ascii')
-        self.outputs = self.state['outputs']
-        self.clear_before_next_output = False
+        self.topic: bytes = ('comm-%s' % self.comm_id).encode('ascii')
+        self.outputs: List = self.state['outputs']
+        self.clear_before_next_output: bool = False
+
+    def clear_output(
+            self,
+            outs: List,
+            msg: Dict,
+            cell_index: int) -> None:

-    def clear_output(self, outs, msg, cell_index):
         self.parent_header = msg['parent_header']
         content = msg['content']
         if content.get('wait'):
@@ -26,12 +40,18 @@ def clear_output(self, outs, msg, cell_index):
         # sync the state to the nbconvert state as well, since that is used for testing
         self.executor.widget_state[self.comm_id]['outputs'] = self.outputs

-    def sync_state(self):
+    def sync_state(self) -> None:
         state = {'outputs': self.outputs}
         msg = {'method': 'update', 'state': state, 'buffer_paths': []}
         self.send(msg)

-    def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
+    def _publish_msg(
+            self,
+            msg_type: str,
+            data: Optional[Dict] = None,
+            metadata: Optional[Dict] = None,
+            buffers: Optional[List] = None,
+            **keys) -> None:
         """Helper for sending a comm message on IOPub"""
         data = {} if data is None else data
         metadata = {} if metadata is None else metadata
@@ -40,10 +60,21 @@ def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys)
                          metadata=metadata)
         self.kernel_client.shell_channel.send(msg)

-    def send(self, data=None, metadata=None, buffers=None):
+    def send(
+            self,
+            data: Optional[Dict] = None,
+            metadata: Optional[Dict] = None,
+            buffers: Optional[List] = None) -> None:
+
         self._publish_msg('comm_msg', data=data, metadata=metadata, buffers=buffers)

-    def output(self, outs, msg, display_id, cell_index):
+    def output(
+            self,
+            outs: List,
+            msg: Dict,
+            display_id: str,
+            cell_index: int) -> None:
+
         if self.clear_before_next_output:
             self.outputs = []
             self.clear_before_next_output = False
@@ -66,7 +97,7 @@ def output(self, outs, msg, display_id, cell_index):
         # sync the state to the nbconvert state as well, since that is used for testing
         self.executor.widget_state[self.comm_id]['outputs'] = self.outputs

-    def set_state(self, state):
+    def set_state(self, state: Dict) -> None:
         if 'msg_id' in state:
             msg_id = state.get('msg_id')
             if msg_id:
@@ -76,7 +107,7 @@ def set_state(self, state):
                 self.executor.remove_output_hook(self.msg_id, self)
             self.msg_id = msg_id

-    def handle_msg(self, msg):
+    def handle_msg(self, msg: Dict) -> None:
         content = msg['content']
         comm_id = content['comm_id']
         assert comm_id == self.comm_id
diff --git a/nbclient/util.py b/nbclient/util.py
index eeaa6199..9ac4e219 100644
--- a/nbclient/util.py
+++ b/nbclient/util.py
@@ -6,28 +6,30 @@
 import asyncio
 import sys
 import inspect
+from typing import Callable, Awaitable, Any, Union


-def check_ipython():
+def check_ipython() -> None:
     # original from vaex/asyncio.py
     IPython = sys.modules.get('IPython')
     if IPython:
-        IPython_version = tuple(map(int, IPython.__version__.split('.')))
+        IPython_version = tuple(map(int, IPython.__version__.split('.')))  # type: ignore
         if IPython_version < (7, 0, 0):
-            raise RuntimeError(f'You are using IPython {IPython.__version__} while we require'
-                               '7.0.0+, please update IPython')
+            raise RuntimeError(f'You are using IPython {IPython.__version__} '  # type: ignore
+                               'while we require 7.0.0+, please update IPython')


-def check_patch_tornado():
+def check_patch_tornado() -> None:
     """If tornado is imported, add the patched asyncio.Future to its tuple of acceptable Futures"""
     # original from vaex/asyncio.py
     if 'tornado' in sys.modules:
         import tornado.concurrent
         if asyncio.Future not in tornado.concurrent.FUTURES:
-            tornado.concurrent.FUTURES = tornado.concurrent.FUTURES + (asyncio.Future, )
+            tornado.concurrent.FUTURES = \
+                tornado.concurrent.FUTURES + (asyncio.Future, )  # type: ignore


-def just_run(coro):
+def just_run(coro: Awaitable) -> Any:
     """Make the coroutine run, even if there is an event loop running (using nest_asyncio)"""
     # original from vaex/asyncio.py
     loop = asyncio._get_running_loop()
@@ -51,7 +53,7 @@ def just_run(coro):
     return loop.run_until_complete(coro)


-def run_sync(coro):
+def run_sync(coro: Callable) -> Callable:
     """Runs a coroutine and blocks until it has executed.

     An event loop is created if no one already exists. If an event loop is
@@ -74,7 +76,7 @@ def wrapped(*args, **kwargs):
     return wrapped


-async def ensure_async(obj):
+async def ensure_async(obj: Union[Awaitable, Any]) -> Any:
     """Convert a non-awaitable object to a coroutine if needed,
     and await it if it was not already awaited.
""" diff --git a/requirements-dev.txt b/requirements-dev.txt index 484c6fe8..3e672cf9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,6 +7,7 @@ pytest>=4.1 pytest-cov>=2.6.1 check-manifest flake8 +mypy tox bumpversion xmltodict diff --git a/tox.ini b/tox.ini index 77a23631..b6cb726b 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,12 @@ [tox] skipsdist = true -envlist = py{36,37,38}, flake8, dist, manifest, docs +envlist = py{36,37,38}, flake8, mypy, dist, manifest, docs [gh-actions] python = 3.6: py36 3.7: py37 - 3.8: py38, flake8, dist, manifest + 3.8: py38, flake8, mypy, dist, manifest # Linters [testenv:flake8] @@ -14,6 +14,12 @@ skip_install = true deps = flake8 commands = flake8 nbclient --count --ignore=E203,E731,F811,W503 --max-complexity=23 --max-line-length=100 --show-source --statistics +# Type check +[testenv:mypy] +skip_install = true +deps = mypy +commands = mypy nbclient/client.py nbclient/exceptions.py nbclient/__init__.py nbclient/jsonutil.py nbclient/output_widget.py nbclient/util.py nbclient/_version.py + # Manifest [testenv:manifest] skip_install = true @@ -50,6 +56,7 @@ basepython = py37: python3.7 py38: python3.8 flake8: python3.8 + mypy: python3.8 manifest: python3.8 binder: python3.8 dist: python3.8