Handle Warnings #760

Merged · 12 commits · Mar 30, 2022
12 changes: 11 additions & 1 deletion .github/workflows/downstream.yml
@@ -12,9 +12,11 @@ concurrency:
jobs:
tests:
runs-on: ubuntu-latest
timeout-minutes: 20
strategy:
matrix:
python-version: ["3.9"]
fail-fast: false
steps:
- name: Checkout
uses: actions/checkout@v2
@@ -50,7 +52,15 @@ jobs:
with:
package_name: jupyter_server

# Test using jupyter_kernel_test
jupyter_kernel_test:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout
uses: actions/checkout@v2

- name: Base Setup
uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1

- name: Setup conda ${{ matrix.python-version }}
uses: conda-incubator/setup-miniconda@v2
7 changes: 5 additions & 2 deletions .github/workflows/main.yml
@@ -45,8 +45,8 @@ jobs:

build-n-test-n-coverage:
name: Build, test and code coverage

runs-on: ${{ matrix.os }}
timeout-minutes: 15

strategy:
fail-fast: false
@@ -94,6 +94,7 @@ jobs:

docs:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout
uses: actions/checkout@v2
@@ -111,6 +112,7 @@
test_miniumum_verisons:
name: Test Minimum Versions
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v2
- name: Base Setup
@@ -124,6 +126,7 @@

test_prereleases:
name: Test Prereleases
timeout-minutes: 10
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -144,7 +147,7 @@
make_sdist:
name: Make SDist
runs-on: ubuntu-latest
timeout-minutes: 20
timeout-minutes: 10
steps:
- uses: actions/checkout@v2
- name: Base Setup
10 changes: 10 additions & 0 deletions jupyter_client/client.py
@@ -11,6 +11,7 @@

import zmq.asyncio
from traitlets import Any # type: ignore
from traitlets import Bool
from traitlets import Instance
from traitlets import Type

@@ -92,7 +93,10 @@ class KernelClient(ConnectionFileMixin):
# The PyZMQ Context to use for communication with the kernel.
context = Instance(zmq.asyncio.Context)

_created_context: Bool = Bool(False)

def _context_default(self) -> zmq.asyncio.Context:
self._created_context = True
return zmq.asyncio.Context()

# The classes to use for the various channels
@@ -282,6 +286,9 @@ def start_channels(
:meth:`start_kernel`. If the channels have been stopped and you
call this, :class:`RuntimeError` will be raised.
"""
# Create the context if needed.
if not self._created_context:
self.context = self._context_default()
if iopub:
self.iopub_channel.start()
if shell:
@@ -311,6 +318,9 @@ def stop_channels(self) -> None:
self.hb_channel.stop()
if self.control_channel.is_alive():
self.control_channel.stop()
if self._created_context:
self._created_context = False
self.context.destroy()

@property
def channels_running(self) -> bool:
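Taken together, the hunks above mean the client tracks whether it created its own zmq context, recreates one on demand in start_channels, and destroys it in stop_channels only when it owns it, which silences "unclosed context" ResourceWarnings. A minimal sketch of the resulting lifecycle, assuming ipykernel is installed (start_new_kernel and the shutdown-then-stop ordering are borrowed from the tests further down; the snippet is illustrative, not part of this diff):

from jupyter_client.manager import start_new_kernel

km, kc = start_new_kernel(kernel_name="python3")
# ... exchange messages with the kernel ...
km.shutdown_kernel()
kc.stop_channels()            # now also destroys the context the client created
assert kc.context.closed      # no lingering zmq context left to warn about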
2 changes: 2 additions & 0 deletions jupyter_client/kernelspec.py
@@ -31,6 +31,8 @@

class KernelSpec(HasTraits):
argv = List()
name = Unicode()
mimetype = Unicode()
display_name = Unicode()
language = Unicode()
env = Dict()
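Declaring name and mimetype as Unicode traits lets a kernel.json that carries those keys be loaded into KernelSpec without traitlets warning about unrecognized arguments. A hedged construction example using the newly declared fields (the values are illustrative):

from jupyter_client.kernelspec import KernelSpec

spec = KernelSpec(
    argv=["python", "-m", "ipykernel_launcher", "-f", "{connection_file}"],
    display_name="Python 3",
    language="python",
    name="python3",            # previously undeclared on the class
    mimetype="text/x-python",  # previously undeclared on the class
)
print(spec.to_json())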
2 changes: 1 addition & 1 deletion jupyter_client/kernelspecapp.py
@@ -182,7 +182,7 @@ def _kernel_spec_manager_default(self):
return KernelSpecManager(data_dir=self.data_dir, parent=self)

flags = {
"f": ({"RemoveKernelSpec": {"force": True}}, force.get_metadata("help")),
"f": ({"RemoveKernelSpec": {"force": True}}, force.help),
}
flags.update(JupyterApp.flags)

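The flag definition now reads the help text straight from the trait's help attribute rather than through the older get_metadata("help") accessor, which newer traitlets deprecates. A small standalone illustration (not code from this repo; the help string is made up):

from traitlets import Bool

force = Bool(False, help="Force removal, don't prompt for confirmation.")
print(force.help)              # preferred: plain attribute access
# force.get_metadata("help")   # legacy accessor; newer traitlets warns about it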
4 changes: 3 additions & 1 deletion jupyter_client/manager.py
@@ -92,6 +92,7 @@ def __init__(self, *args, **kwargs):
self._shutdown_status = _ShutdownStatus.Unset
# Create a place holder future.
try:
asyncio.get_running_loop()
self._ready = Future()
except RuntimeError:
# No event loop running, use concurrent future
@@ -476,7 +477,8 @@ async def _async_shutdown_kernel(self, now: bool = False, restart: bool = False)
# Stop monitoring for restarting while we shutdown.
self.stop_restarter()

await ensure_async(self.interrupt_kernel())
if self.has_kernel:
await ensure_async(self.interrupt_kernel())

if now:
await ensure_async(self._kill_kernel())
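Two separate fixes above: the placeholder _ready future is only an asyncio future when a loop is actually running, and shutdown only interrupts a kernel that actually exists. The loop probe follows this pattern (a self-contained sketch of the idiom with illustrative names, not the class's real attributes):

import asyncio
from concurrent.futures import Future as ConcurrentFuture

def make_placeholder_future():
    """Use an asyncio Future when a loop is running, else a concurrent one."""
    try:
        asyncio.get_running_loop()   # raises RuntimeError outside a running loop
        return asyncio.Future()
    except RuntimeError:
        return ConcurrentFuture()

print(type(make_placeholder_future()))       # concurrent.futures.Future here

async def main():
    print(type(make_placeholder_future()))   # asyncio.Future inside the loop

asyncio.run(main())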
5 changes: 5 additions & 0 deletions jupyter_client/provisioning/local_provisioner.py
@@ -60,6 +60,11 @@ async def wait(self) -> Optional[int]:

# Process is no longer alive, wait and clear
ret = self.process.wait()
# Make sure all the fds get closed.
for attr in ['stdout', 'stderr', 'stdin']:
fid = getattr(self.process, attr)
if fid:
fid.close()
self.process = None # allow has_process to now return False
return ret

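Closing the pipe objects after wait() is what keeps a warnings-as-errors test run quiet: a Popen started with PIPE holds open file objects for stdout, stderr, and stdin until they are closed explicitly, and garbage-collecting them emits "unclosed file" ResourceWarnings. The same cleanup on a plain subprocess (a hypothetical command, not the provisioner's own launch code):

import subprocess
import sys

proc = subprocess.Popen(
    [sys.executable, "-c", "print('hello')"],
    stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
ret = proc.wait()
# Close every pipe the Popen object still holds so nothing is left for the
# garbage collector to complain about.
for attr in ("stdout", "stderr", "stdin"):
    fid = getattr(proc, attr)
    if fid:
        fid.close()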
4 changes: 1 addition & 3 deletions jupyter_client/ssh/tunnel.py
@@ -36,8 +36,6 @@ class SSHException(Exception):  # type: ignore
except ImportError:
pexpect = None

from zmq.utils.strtypes import b


def select_random_ports(n):
"""Select and return n random ports that are available."""
@@ -56,7 +54,7 @@ def select_random_ports(n):
# -----------------------------------------------------------------------------
# Check for passwordless login
# -----------------------------------------------------------------------------
_password_pat = re.compile(b(r"pass(word|phrase):"), re.IGNORECASE)
_password_pat = re.compile((r"pass(word|phrase):".encode("utf8")), re.IGNORECASE)


def try_passwordless_ssh(server, keyfile, paramiko=None):
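zmq.utils.strtypes.b was a Python 2/3 compatibility shim that newer pyzmq deprecates; on Python 3 it is equivalent to UTF-8 encoding the string, so the import can simply go away. A one-line equivalence check (illustrative only):

import re

pattern = r"pass(word|phrase):".encode("utf8")      # the same bytes b() used to return
_password_pat = re.compile(pattern, re.IGNORECASE)
assert _password_pat.search(b"Enter passphrase: ")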
21 changes: 21 additions & 0 deletions jupyter_client/tests/conftest.py
@@ -7,9 +7,30 @@

from .utils import test_env

try:
import resource
except ImportError:
# Windows
resource = None

pjoin = os.path.join


# Handle resource limit
# Ensure a minimal soft limit of DEFAULT_SOFT if the current hard limit is at least that much.
if resource is not None:
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)

DEFAULT_SOFT = 4096
if hard >= DEFAULT_SOFT:
soft = DEFAULT_SOFT

if hard < soft:
hard = soft

resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))


if os.name == "nt" and sys.version_info >= (3, 7):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

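Raising the soft RLIMIT_NOFILE matters because the suite starts many kernels, each holding several sockets and pipes, and typical default soft limits (around 256 on macOS, 1024 on many Linux distributions) are easy to exhaust. Inspecting and adjusting the limit looks roughly like this (POSIX only; an illustrative sketch, not the conftest code itself):

import resource

soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
print(f"soft={soft} hard={hard}")
# Ask for a soft limit of 4096 only when the hard limit clearly allows it;
# an unprivileged process may raise its soft limit up to the hard limit.
if hard >= 4096:
    resource.setrlimit(resource.RLIMIT_NOFILE, (4096, hard))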
8 changes: 6 additions & 2 deletions jupyter_client/tests/test_client.py
@@ -28,8 +28,12 @@ def setUp(self):
except NoSuchKernel:
pytest.skip()
self.km, self.kc = start_new_kernel(kernel_name=NATIVE_KERNEL_NAME)
self.addCleanup(self.kc.stop_channels)
self.addCleanup(self.km.shutdown_kernel)

def tearDown(self):
self.env_patch.stop()
self.km.shutdown_kernel()
self.kc.stop_channels()
return super().tearDown()

def test_execute_interactive(self):
kc = self.kc
1 change: 1 addition & 0 deletions jupyter_client/tests/test_kernelmanager.py
@@ -440,6 +440,7 @@ def execute(cmd):

km.shutdown_kernel()
assert km.context.closed
kc.stop_channels()


@pytest.mark.asyncio
20 changes: 13 additions & 7 deletions jupyter_client/tests/test_multikernelmanager.py
@@ -44,6 +44,10 @@ def setUp(self):
self.env_patch.start()
super().setUp()

def tearDown(self) -> None:
self.env_patch.stop()
return super().tearDown()

# static so picklable for multiprocessing on Windows
@staticmethod
def _get_tcp_km():
@@ -243,6 +247,10 @@ def setUp(self):
self.env_patch.start()
super().setUp()

def tearDown(self) -> None:
self.env_patch.stop()
return super().tearDown()

# static so picklable for multiprocessing on Windows
@staticmethod
def _get_tcp_km():
@@ -465,8 +473,9 @@ async def test_start_sequence_ipc_kernels(self):

def tcp_lifecycle_with_loop(self):
# Ensure each thread has an event loop
asyncio.set_event_loop(asyncio.new_event_loop())
asyncio.get_event_loop().run_until_complete(self.raw_tcp_lifecycle())
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(self.raw_tcp_lifecycle())

# static so picklable for multiprocessing on Windows
@classmethod
@@ -479,11 +488,8 @@ async def raw_tcp_lifecycle(cls, test_kid=None):
# static so picklable for multiprocessing on Windows
@classmethod
def raw_tcp_lifecycle_sync(cls, test_kid=None):
loop = asyncio.get_event_loop()
if loop.is_running():
# Forked MP, make new loop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(cls.raw_tcp_lifecycle(test_kid=test_kid))

@gen_test
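Both helpers now build a brand-new event loop instead of asking asyncio.get_event_loop() for one, which sidesteps the "There is no current event loop" DeprecationWarning and behaves the same whether the code runs in a fresh thread or a forked process. The per-thread pattern in isolation (a self-contained sketch, not test code from this repo):

import asyncio
import threading

async def work():
    await asyncio.sleep(0)
    return threading.current_thread().name

def run_in_thread():
    # A worker thread has no event loop of its own; get_event_loop() there
    # raises RuntimeError, so create and install one explicitly.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        print(loop.run_until_complete(work()))
    finally:
        loop.close()

t = threading.Thread(target=run_in_thread)
t.start()
t.join()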
5 changes: 5 additions & 0 deletions jupyter_client/tests/test_provisioning.py
@@ -66,6 +66,11 @@ async def wait(self) -> Optional[int]:

# Process is no longer alive, wait and clear
ret = self.process.wait()
# Make sure all the fds get closed.
for attr in ['stdout', 'stderr', 'stdin']:
fid = getattr(self.process, attr)
if fid:
fid.close()
self.process = None
return ret

4 changes: 3 additions & 1 deletion jupyter_client/tests/test_session.py
@@ -10,6 +10,7 @@

import pytest
import zmq
from tornado import ioloop
from zmq.eventloop.zmqstream import ZMQStream
from zmq.tests import BaseZMQTestCase

@@ -171,7 +172,8 @@ def test_tracking(self):
a, b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
s = self.session
s.copy_threshold = 1
ZMQStream(a)
loop = ioloop.IOLoop(make_current=False)
ZMQStream(a, io_loop=loop)
msg = s.send(a, "hello", track=False)
self.assertTrue(msg["tracker"] is ss.DONE)
msg = s.send(a, "hello", track=True)
30 changes: 0 additions & 30 deletions jupyter_client/tests/test_utils.py

This file was deleted.

6 changes: 5 additions & 1 deletion jupyter_client/tests/utils.py
@@ -62,7 +62,11 @@ def start(self):

def stop(self):
self.env_patch.stop()
self.test_dir.cleanup()
try:
self.test_dir.cleanup()
except (PermissionError, NotADirectoryError):
if os.name != 'nt':
raise

def __enter__(self):
self.start()
4 changes: 2 additions & 2 deletions jupyter_client/utils.py
@@ -11,14 +11,14 @@
def run_sync(coro):
def wrapped(*args, **kwargs):
try:
loop = asyncio.get_event_loop()
loop = asyncio.get_running_loop()
[Inline review thread on this line]

Member: This line, and only this line, seems to be breaking some projects, in particular https://github.com/Carreau/inplace_restarter. It seems to be because asyncio.get_event_loop() can return the current event loop even if it is not (yet) running. I'm not sure what a proper fix could be.

Contributor (author): Thanks for the heads up, I put a workaround in 0a2c228 (#772).

Member: Thanks, I also fixed my project upstream; I'll try to have a look at #772.

except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
import nest_asyncio # type: ignore

nest_asyncio.apply(loop)
future = asyncio.ensure_future(coro(*args, **kwargs))
future = asyncio.ensure_future(coro(*args, **kwargs), loop=loop)
try:
return loop.run_until_complete(future)
except BaseException as e:
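The review thread above hinges on the difference between the two lookups: asyncio.get_event_loop() can return a loop that exists but is not running, whereas asyncio.get_running_loop() raises RuntimeError unless it is called from inside a running loop, which pushes run_sync into the new-loop-plus-nest_asyncio branch. A minimal demonstration of that difference (illustrative, not repo code):

import asyncio

try:
    asyncio.get_running_loop()
except RuntimeError:
    print("no running event loop")           # always taken at module level

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
print(asyncio.get_event_loop() is loop)      # True, even though the loop is not running

async def main():
    print(asyncio.get_running_loop() is loop)    # True only while the loop runs

loop.run_until_complete(main())
loop.close()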
25 changes: 25 additions & 0 deletions pyproject.toml
@@ -36,3 +36,28 @@ testpaths = [
timeout = 300
# Restore this setting to debug failures
# timeout_method = "thread"
asyncio_mode = "auto"
filterwarnings= [
# Fail on warnings
"error",

# Workarounds for https://github.com/pytest-dev/pytest-asyncio/issues/77
"ignore:unclosed <socket.socket:ResourceWarning",
"ignore:unclosed event loop:ResourceWarning",

# Workaround for https://github.com/tornadoweb/tornado/issues/3106
# (To be fixed in Tornado 6.2)
"ignore:There is no current event loop:DeprecationWarning:tornado",

# Workaround for distutils.Version used in ipykernel
"ignore:The distutils package is deprecated and slated for removal:DeprecationWarning:ipykernel",

# ZMQ uses Future internally, which raises a DeprecationWarning
# When there is no loop running.
# We could eventually find a way to make sure these are only created
# when there is a running event loop.
"ignore:There is no current event loop:DeprecationWarning:zmq",

# Workaround for imp used in ipykernel
"ignore:the imp module is deprecated in favour of importlib:DeprecationWarning"
]
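
With "error" as the first entry, any warning not matched by a later ignore entry is turned into an exception and fails the test that triggered it; each ignore entry uses the action:message:category[:module] syntax, and entries listed later take precedence. A hedged sketch of how a test run behaves under this configuration (the test names are made up):

import warnings
import pytest

def test_unexpected_warning_becomes_an_error():
    # Not covered by any ignore entry, so the "error" filter raises it.
    with pytest.raises(UserWarning):
        warnings.warn("something unexpected", UserWarning)

def test_ignored_warning_still_passes():
    # Matches "ignore:unclosed event loop:ResourceWarning", so it is swallowed.
    warnings.warn("unclosed event loop <_UnixSelectorEventLoop>", ResourceWarning)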