Skip to content
Permalink

Comparing changes

Choose two branches to see what’s changed or to start a new pull request. If you need to, you can also compare across forks or learn more about diff comparisons.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks. Learn more about diff comparisons here.
base repository: jupyter/jupyter_client
Failed to load repositories. Confirm that selected base ref is valid, then try again.
Loading
base: v8.1.0
Choose a base ref
...
head repository: jupyter/jupyter_client
Failed to load repositories. Confirm that selected head ref is valid, then try again.
Loading
compare: dbf6b81fa5ab606eaedc5e8d0843debce18e8746
Choose a head ref
  • 4 commits
  • 16 files changed
  • 4 contributors

Commits on Apr 7, 2023

  1. Copy the full SHA
    98c87fe View commit details
  2. Copy the full SHA
    c5d9e1a View commit details
  3. [pre-commit.ci] pre-commit autoupdate (#943)

    * [pre-commit.ci] pre-commit autoupdate
    
    updates:
    - [github.com/python-jsonschema/check-jsonschema: 0.21.0 → 0.22.0](python-jsonschema/check-jsonschema@0.21.0...0.22.0)
    - [github.com/psf/black: 23.1.0 → 23.3.0](psf/black@23.1.0...23.3.0)
    - [github.com/charliermarsh/ruff-pre-commit: v0.0.254 → v0.0.260](astral-sh/ruff-pre-commit@v0.0.254...v0.0.260)
    
    * sync deps and run lint
    
    ---------
    
    Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
    Co-authored-by: Steven Silvester <steven.silvester@ieee.org>
    pre-commit-ci[bot] and blink1073 authored Apr 7, 2023
    Copy the full SHA
    58017fc View commit details

Commits on Apr 13, 2023

  1. Use local coverage (#945)

    blink1073 authored Apr 13, 2023
    Copy the full SHA
    dbf6b81 View commit details
17 changes: 12 additions & 5 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
@@ -65,10 +65,17 @@ jobs:
if: ${{ startsWith(matrix.os, 'windows') }}
run: |
hatch run cov:nowarn || hatch run test:nowarn --lf
- name: Code coverage
run: |
pip install codecov coverage[toml]
codecov
- uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1

coverage:
runs-on: ubuntu-latest
needs:
- test
steps:
- uses: actions/checkout@v3
- uses: jupyterlab/maintainer-tools/.github/actions/report-coverage@v1
with:
fail_under: 78

docs:
runs-on: windows-latest
@@ -148,7 +155,7 @@ jobs:
tests_check: # This job does nothing and is only used for the branch protection
if: always()
needs:
- test
- coverage
- docs
- lint
- check_links
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -20,7 +20,7 @@ repos:
- id: trailing-whitespace

- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.21.0
rev: 0.22.0
hooks:
- id: check-github-workflows

@@ -30,12 +30,12 @@ repos:
- id: mdformat

- repo: https://github.com/psf/black
rev: 23.1.0
rev: 23.3.0
hooks:
- id: black

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.254
rev: v0.0.260
hooks:
- id: ruff
args: ["--fix"]
1 change: 0 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# Jupyter Client

[![Build Status](https://github.com/jupyter/jupyter_client/workflows/CI/badge.svg)](https://github.com/jupyter/jupyter_client/actions)
[![codecov](https://codecov.io/gh/jupyter/jupyter_client/branch/main/graph/badge.svg?token=kxoFu4KnhT)](https://codecov.io/gh/jupyter/jupyter_client)
[![Documentation Status](https://readthedocs.org/projects/jupyter-client/badge/?version=latest)](http://jupyter-client.readthedocs.io/en/latest/?badge=latest)

`jupyter_client` contains the reference implementation of the [Jupyter protocol].
9 changes: 0 additions & 9 deletions codecov.yml

This file was deleted.

31 changes: 9 additions & 22 deletions jupyter_client/__init__.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,10 @@
"""Client-side implementations of the Jupyter protocol"""
from ._version import __version__ # noqa
from ._version import protocol_version # noqa
from ._version import protocol_version_info # noqa
from ._version import version_info # noqa

try:
from .asynchronous import AsyncKernelClient # noqa
from .blocking import BlockingKernelClient
from .client import KernelClient
from .connect import * # noqa
from .launcher import * # noqa
from .manager import AsyncKernelManager
from .manager import KernelManager
from .manager import run_kernel
from .multikernelmanager import AsyncMultiKernelManager
from .multikernelmanager import MultiKernelManager
from .provisioning import KernelProvisionerBase
from .provisioning import LocalProvisioner
except ModuleNotFoundError:
import warnings

warnings.warn("Could not import submodules")
from ._version import __version__, protocol_version, protocol_version_info, version_info
from .asynchronous import AsyncKernelClient
from .blocking import BlockingKernelClient
from .client import KernelClient
from .connect import * # noqa
from .launcher import * # noqa
from .manager import AsyncKernelManager, KernelManager, run_kernel
from .multikernelmanager import AsyncMultiKernelManager, MultiKernelManager
from .provisioning import KernelProvisionerBase, LocalProvisioner
4 changes: 3 additions & 1 deletion jupyter_client/connect.py
Original file line number Diff line number Diff line change
@@ -191,7 +191,9 @@ def find_connection_file(
str : The absolute path of the connection file.
"""
if profile is not None:
warnings.warn("Jupyter has no profiles. profile=%s has been ignored." % profile)
warnings.warn(
"Jupyter has no profiles. profile=%s has been ignored." % profile, stacklevel=2
)
if path is None:
path = [".", jupyter_runtime_dir()]
if isinstance(path, str):
2 changes: 1 addition & 1 deletion jupyter_client/consoleapp.py
Original file line number Diff line number Diff line change
@@ -371,5 +371,5 @@ class IPythonConsoleApp(JupyterConsoleApp):

def __init__(self, *args, **kwargs):
"""Initialize the app."""
warnings.warn("IPythonConsoleApp is deprecated. Use JupyterConsoleApp")
warnings.warn("IPythonConsoleApp is deprecated. Use JupyterConsoleApp", stacklevel=2)
super().__init__(*args, **kwargs)
7 changes: 5 additions & 2 deletions jupyter_client/launcher.py
Original file line number Diff line number Diff line change
@@ -162,8 +162,11 @@ def launch_kernel(
msg = msg.format(cmd, env.get("PATH", os.defpath), without_env)
get_logger().error(msg)
except Exception as ex2: # Don't let a formatting/logger issue lead to the wrong exception
warnings.warn(f"Failed to run command: '{cmd}' due to exception: {ex}")
warnings.warn(f"The following exception occurred handling the previous failure: {ex2}")
warnings.warn(f"Failed to run command: '{cmd}' due to exception: {ex}", stacklevel=2)
warnings.warn(
f"The following exception occurred handling the previous failure: {ex2}",
stacklevel=2,
)
raise ex

if sys.platform == "win32":
2 changes: 1 addition & 1 deletion jupyter_client/localinterfaces.py
Original file line number Diff line number Diff line change
@@ -261,7 +261,7 @@ def _load_ips(suppress_exceptions=True):
if not suppress_exceptions:
raise
# unexpected error shouldn't crash, load dumb default values instead.
warn("Unexpected error discovering local network interfaces: %s" % e)
warn("Unexpected error discovering local network interfaces: %s" % e, stacklevel=2)
_load_ips_dumb()


2 changes: 1 addition & 1 deletion jupyter_client/manager.py
Original file line number Diff line number Diff line change
@@ -178,7 +178,7 @@ def _kernel_name_changed(self, change: t.Dict[str, str]) -> None:

@property
def kernel_spec(self) -> t.Optional[kernelspec.KernelSpec]:
if self._kernel_spec is None and self.kernel_name != "":
if self._kernel_spec is None and self.kernel_name != "": # noqa
self._kernel_spec = self.kernel_spec_manager.get_kernel_spec(self.kernel_name)
return self._kernel_spec

13 changes: 7 additions & 6 deletions jupyter_client/session.py
Original file line number Diff line number Diff line change
@@ -188,7 +188,7 @@ def default_secure(cfg: t.Any) -> None: # pragma: no cover
If Session.key/keyfile have not been set, set Session.key to
a new random UUID.
"""
warnings.warn("default_secure is deprecated", DeprecationWarning)
warnings.warn("default_secure is deprecated", DeprecationWarning, stacklevel=2)
if "Session" in cfg and ("key" in cfg.Session or "keyfile" in cfg.Session):
return
# key/keyfile not specified, generate new UUID:
@@ -567,7 +567,7 @@ def __init__(self, **kwargs):
self._check_packers()
self.none = self.pack({})
# ensure self._session_default() if necessary, so bsession is defined:
self.session
self.session # noqa
self.pid = os.getpid()
self._new_auth()
if not self.key:
@@ -861,9 +861,9 @@ def send(
stream.send_multipart(to_send, copy=copy)

if self.debug:
pprint.pprint(msg)
pprint.pprint(to_send)
pprint.pprint(buffers)
pprint.pprint(msg) # noqa
pprint.pprint(to_send) # noqa
pprint.pprint(buffers) # noqa

msg["tracker"] = tracker

@@ -1088,7 +1088,7 @@ def deserialize(
buffers = [memoryview(bytes(b.bytes)) for b in msg_list[5:]]
message["buffers"] = buffers
if self.debug:
pprint.pprint(message)
pprint.pprint(message) # noqa
# adapt to the current version
return adapt(message)

@@ -1098,5 +1098,6 @@ def unserialize(self, *args: t.Any, **kwargs: t.Any) -> t.Dict[str, t.Any]:
warnings.warn(
"Session.unserialize is deprecated. Use Session.deserialize.",
DeprecationWarning,
stacklevel=2,
)
return self.deserialize(*args, **kwargs)
8 changes: 4 additions & 4 deletions jupyter_client/ssh/tunnel.py
Original file line number Diff line number Diff line change
@@ -269,7 +269,7 @@ def openssh_tunnel(
return tunnel.pid
else:
if failed:
warnings.warn("Password rejected, try again")
warnings.warn("Password rejected, try again", stacklevel=2)
password = None
if password is None:
password = getpass("%s's password: " % (server))
@@ -378,7 +378,7 @@ def _paramiko_tunnel(lport, rport, server, remoteip, keyfile=None, password=None
# else:
# raise
except Exception as e:
warnings.warn("*** Failed to connect to %s:%d: %r" % (server, port, e))
warnings.warn("*** Failed to connect to %s:%d: %r" % (server, port, e), stacklevel=2)
sys.exit(1)

# Don't let SIGINT kill the tunnel subprocess
@@ -387,10 +387,10 @@ def _paramiko_tunnel(lport, rport, server, remoteip, keyfile=None, password=None
try:
forward_tunnel(lport, remoteip, rport, client.get_transport())
except KeyboardInterrupt:
warnings.warn("SIGINT: Port forwarding stopped cleanly")
warnings.warn("SIGINT: Port forwarding stopped cleanly", stacklevel=2)
sys.exit(0)
except Exception as e:
warnings.warn("Port forwarding stopped uncleanly: %s" % e)
warnings.warn("Port forwarding stopped uncleanly: %s" % e, stacklevel=2)
sys.exit(255)


76 changes: 54 additions & 22 deletions jupyter_client/threaded.py
Original file line number Diff line number Diff line change
@@ -5,7 +5,8 @@
import atexit
import time
from concurrent.futures import Future
from threading import Event, Thread
from functools import partial
from threading import Thread
from typing import Any, Dict, List, Optional

import zmq
@@ -54,17 +55,22 @@ def __init__(
self.socket = socket
self.session = session
self.ioloop = loop
evt = Event()
f: Future = Future()

def setup_stream():
assert self.socket is not None
self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
self.stream.on_recv(self._handle_recv)
evt.set()
try:
assert self.socket is not None
self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
self.stream.on_recv(self._handle_recv)
except Exception as e:
f.set_exception(e)
else:
f.set_result(None)

assert self.ioloop is not None
self.ioloop.add_callback(setup_stream)
evt.wait()
# don't wait forever, raise any errors
f.result(timeout=10)

_is_alive = False

@@ -179,13 +185,31 @@ def flush(self, timeout: float = 1.0) -> None:
"""
# We do the IOLoop callback process twice to ensure that the IOLoop
# gets to perform at least one full poll.
stop_time = time.time() + timeout
stop_time = time.monotonic() + timeout
assert self.ioloop is not None
if self.stream is None or self.stream.closed():
# don't bother scheduling flush on a thread if we're closed
_msg = "Attempt to flush closed stream"
raise OSError(_msg)

def flush(f):
try:
self._flush()
except Exception as e:
f.set_exception(e)
else:
f.set_result(None)

for _ in range(2):
self._flushed = False
self.ioloop.add_callback(self._flush)
while not self._flushed and time.time() < stop_time:
time.sleep(0.01)
f: Future = Future()
self.ioloop.add_callback(partial(flush, f))
# wait for async flush, re-raise any errors
timeout = max(stop_time - time.monotonic(), 0)
try:
f.result(max(stop_time - time.monotonic(), 0))
except TimeoutError:
# flush with a timeout means stop waiting, not raise
return

def _flush(self) -> None:
"""Callback for :method:`self.flush`."""
@@ -219,24 +243,32 @@ def start(self) -> None:
Don't return until self.ioloop is defined,
which is created in the thread
"""
self._start_event = Event()
self._start_future: Future = Future()
Thread.start(self)
self._start_event.wait()
# wait for start, re-raise any errors
self._start_future.result(timeout=10)

def run(self) -> None:
"""Run my loop, ignoring EINTR events in the poller"""
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

async def assign_ioloop():
self.ioloop = IOLoop.current()

loop.run_until_complete(assign_ioloop())
except Exception as e:
self._start_future.set_exception(e)
else:
self._start_future.set_result(None)

loop.run_until_complete(self._async_run())

async def _async_run(self):
self.ioloop = IOLoop.current()
# signal that self.ioloop is defined
self._start_event.set()
while True:
"""Run forever (until self._exiting is set)"""
while not self._exiting:
await asyncio.sleep(1)
if self._exiting:
break

def stop(self) -> None:
"""Stop the channel's event loop and join its thread.
Loading