diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 987ea15c83..94718742eb 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -11,7 +11,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest] - python-version: ["3.7", "3.8", "3.9", "3.10", "pypy-3.7"] + python-version: ["3.8", "3.9", "3.10", "3.11", "pypy-3.8"] steps: - name: Checkout uses: actions/checkout@v2 diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index 4762bdeb95..99b2810eea 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -18,14 +18,14 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.11"] include: - os: windows-latest python-version: "3.9" - os: ubuntu-latest python-version: "pypy-3.8" - os: macos-latest - python-version: "3.8" + python-version: "3.10" steps: - name: Checkout uses: actions/checkout@v2 @@ -33,15 +33,32 @@ jobs: uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run the tests if: ${{ !startsWith(matrix.python-version, 'pypy') && !startsWith(matrix.os, 'windows') }} - run: hatch run cov:test || hatch run cov:test --lf + run: hatch run cov:test -W default || hatch run cov:test -W default --lf - name: Run the tests on pypy and windows if: ${{ startsWith(matrix.python-version, 'pypy') || startsWith(matrix.os, 'windows') }} - run: hatch run test:test || hatch run test:test --lf + run: hatch run test:test -W default || hatch run test:test -W default --lf - name: Coverage run: | pip install codecov codecov + client8: + runs-on: ${{ matrix.os }} + timeout-minutes: 20 + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ["3.10"] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Base Setup + uses: 
jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - run: | + pip install --pre -U jupyter_client + hatch run test:test || hatch run test:test --lf + pre-commit: name: pre-commit runs-on: ubuntu-latest @@ -94,7 +111,7 @@ jobs: - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 with: - python_version: "3.7" + python_version: "3.8" - name: Install miniumum versions uses: jupyterlab/maintainer-tools/.github/actions/install-minimums@v1 - name: Run the unit tests @@ -110,11 +127,11 @@ jobs: - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 with: - python_version: "3.11.0-beta - 3.11.0" + python_version: "3.11" - name: Install the Python dependencies run: | pip install --no-deps . - pip install --pre --upgrade "jupyter_server[test]" + pip install --pre --upgrade ".[test]" - name: List installed packages run: | pip freeze diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6552761a62..c50ff27c51 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: rev: v3.2.0 hooks: - id: pyupgrade - args: [--py37-plus] + args: [--py38-plus] - repo: https://github.com/PyCQA/doc8 rev: v1.0.0 diff --git a/examples/simple/README.md b/examples/simple/README.md index 2652c2da4f..49fe480252 100644 --- a/examples/simple/README.md +++ b/examples/simple/README.md @@ -10,7 +10,7 @@ You need `python3` to build and run the server extensions. # Clone, create a conda env and install from source. 
git clone https://github.com/jupyter/jupyter_server && \ cd examples/simple && \ - conda create -y -n jupyter-server-example python=3.7 && \ + conda create -y -n jupyter-server-example python=3.9 && \ conda activate jupyter-server-example && \ pip install -e .[test] ``` diff --git a/examples/simple/pytest.ini b/examples/simple/pytest.ini index 83dd817a2b..4f95513e55 100644 --- a/examples/simple/pytest.ini +++ b/examples/simple/pytest.ini @@ -1,3 +1,4 @@ [pytest] # Disable any upper exclusion. norecursedirs = +asyncio_mode = auto diff --git a/examples/simple/setup.py b/examples/simple/setup.py index 1aeea36e2f..d25c45b896 100644 --- a/examples/simple/setup.py +++ b/examples/simple/setup.py @@ -37,13 +37,13 @@ def add_data_files(path): version=VERSION, description="Jupyter Server Example", long_description=open("README.md").read(), - python_requires=">=3.7", + python_requires=">=3.8", install_requires=[ "jupyter_server", "jinja2", ], extras_require={ - "test": ["pytest"], + "test": ["pytest", "pytest-asyncio"], }, include_package_data=True, cmdclass=cmdclass, diff --git a/jupyter_server/__init__.py b/jupyter_server/__init__.py index 25d09ee20e..c6ca245c91 100644 --- a/jupyter_server/__init__.py +++ b/jupyter_server/__init__.py @@ -1,8 +1,6 @@ """The Jupyter Server""" import os import pathlib -import subprocess -import sys DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") DEFAULT_TEMPLATE_PATH_LIST = [ @@ -21,12 +19,3 @@ def _cleanup(): pass - - -# patch subprocess on Windows for python<3.7 -# see https://bugs.python.org/issue37380 -# the fix for python3.7: https://github.com/python/cpython/pull/15706/files -if sys.platform == "win32": - if sys.version_info < (3, 7): - subprocess._cleanup = _cleanup - subprocess._active = None diff --git a/jupyter_server/base/zmqhandlers.py b/jupyter_server/base/zmqhandlers.py index 8600ed33a0..252bbba0de 100644 --- a/jupyter_server/base/zmqhandlers.py +++ b/jupyter_server/base/zmqhandlers.py @@ -19,7 +19,7 
@@ from jupyter_client.session import Session from tornado import ioloop, web from tornado.iostream import IOStream -from tornado.websocket import WebSocketHandler +from tornado.websocket import WebSocketClosedError, WebSocketHandler from .handlers import JupyterHandler @@ -302,7 +302,10 @@ def _on_zmq_reply(self, stream, msg_list): except Exception: self.log.critical("Malformed message: %r" % msg_list, exc_info=True) else: - self.write_message(msg, binary=isinstance(msg, bytes)) + try: + self.write_message(msg, binary=isinstance(msg, bytes)) + except WebSocketClosedError as e: + self.log.warning(str(e)) class AuthenticatedZMQStreamHandler(ZMQStreamHandler, JupyterHandler): diff --git a/jupyter_server/pytest_plugin.py b/jupyter_server/pytest_plugin.py index a34fdf62a2..dead827c28 100644 --- a/jupyter_server/pytest_plugin.py +++ b/jupyter_server/pytest_plugin.py @@ -1,5 +1,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +import asyncio import importlib import io import json @@ -9,11 +10,14 @@ import sys import urllib.parse from binascii import hexlify +from contextlib import closing import jupyter_core.paths import nbformat import pytest import tornado +import tornado.testing +from pytest_tornasync.plugin import AsyncHTTPServerClient from tornado.escape import url_escape from tornado.httpclient import HTTPClientError from tornado.websocket import WebSocketHandler @@ -35,9 +39,7 @@ ] -import asyncio - -if os.name == "nt" and sys.version_info >= (3, 7): +if os.name == "nt": asyncio.set_event_loop_policy( asyncio.WindowsSelectorEventLoopPolicy() # type:ignore[attr-defined] ) @@ -45,6 +47,9 @@ # ============ Move to Jupyter Core ============= +# Once the chunk below moves to Jupyter Core +# use the fixtures directly from Jupyter Core. 
+ def mkdir(tmp_path, *parts): path = tmp_path.joinpath(*parts) @@ -130,6 +135,55 @@ def jp_environ( # ================= End: Move to Jupyter core ================ +@pytest.fixture +def asyncio_loop(): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + yield loop + loop.close() + + +@pytest.fixture(autouse=True) +def io_loop(asyncio_loop): + async def get_tornado_loop(): + return tornado.ioloop.IOLoop.current() + + return asyncio_loop.run_until_complete(get_tornado_loop()) + + +@pytest.fixture +def http_server_client(http_server, io_loop): + """ + Create an asynchronous HTTP client that can fetch from `http_server`. + """ + + async def get_client(): + return AsyncHTTPServerClient(http_server=http_server) + + client = io_loop.run_sync(get_client) + with closing(client) as context: + yield context + + +@pytest.fixture +def http_server(io_loop, http_server_port, jp_web_app): + """Start a tornado HTTP server that listens on all available interfaces.""" + + async def get_server(): + server = tornado.httpserver.HTTPServer(jp_web_app) + server.add_socket(http_server_port[0]) + return server + + server = io_loop.run_sync(get_server) + yield server + server.stop() + + if hasattr(server, "close_all_connections"): + io_loop.run_sync(server.close_all_connections) + + http_server_port[0].close() + + @pytest.fixture def jp_server_config(): """Allows tests to setup their specific configuration values.""" @@ -167,7 +221,8 @@ def jp_extension_environ(jp_env_config_path, monkeypatch): @pytest.fixture def jp_http_port(http_server_port): """Returns the port value from the http_server_port fixture.""" - return http_server_port[-1] + yield http_server_port[-1] + http_server_port[0].close() @pytest.fixture @@ -216,8 +271,8 @@ def jp_configurable_serverapp( jp_base_url, tmp_path, jp_root_dir, - io_loop, jp_logging_stream, + asyncio_loop, ): """Starts a Jupyter Server instance based on the provided configuration values. 
@@ -254,8 +309,9 @@ def _configurable_serverapp( ): c = Config(config) c.NotebookNotary.db_file = ":memory:" - token = hexlify(os.urandom(4)).decode("ascii") - c.IdentityProvider.token = token + if "token" not in c.ServerApp and not c.IdentityProvider.token: + token = hexlify(os.urandom(4)).decode("ascii") + c.IdentityProvider.token = token # Allow tests to configure root_dir via a file, argv, or its # default (cwd) by specifying a value of None. @@ -278,7 +334,14 @@ def _configurable_serverapp( app.log.propagate = True app.log.handlers = [] # Initialize app without httpserver - app.initialize(argv=argv, new_httpserver=False) + if asyncio_loop.is_running(): + app.initialize(argv=argv, new_httpserver=False) + else: + + async def initialize_app(): + app.initialize(argv=argv, new_httpserver=False) + + asyncio_loop.run_until_complete(initialize_app()) # Reroute all logging StreamHandlers away from stdin/stdout since pytest hijacks # these streams and closes them at unfortunate times. stream_handlers = [h for h in app.log.handlers if isinstance(h, logging.StreamHandler)] @@ -286,40 +349,14 @@ def _configurable_serverapp( handler.setStream(jp_logging_stream) app.log.propagate = True app.log.handlers = [] - # Start app without ioloop app.start_app() return app return _configurable_serverapp -@pytest.fixture -def jp_ensure_app_fixture(request): - """Ensures that the 'app' fixture used by pytest-tornasync - is set to `jp_web_app`, the Tornado Web Application returned - by the ServerApp in Jupyter Server, provided by the jp_web_app - fixture in this module. - - Note, this hardcodes the `app_fixture` option from - pytest-tornasync to `jp_web_app`. If this value is configured - to something other than the default, it will raise an exception. - """ - app_option = request.config.getoption("app_fixture") - if app_option not in ["app", "jp_web_app"]: - raise Exception( - "jp_serverapp requires the `app-fixture` option " - "to be set to 'jp_web_app`. 
Try rerunning the " - "current tests with the option `--app-fixture " - "jp_web_app`." - ) - elif app_option == "app": - # Manually set the app_fixture to `jp_web_app` if it's - # not set already. - request.config.option.app_fixture = "jp_web_app" - - @pytest.fixture(scope="function") -def jp_serverapp(jp_ensure_app_fixture, jp_server_config, jp_argv, jp_configurable_serverapp): +def jp_serverapp(jp_server_config, jp_argv, jp_configurable_serverapp): """Starts a Jupyter Server instance based on the established configuration values.""" return jp_configurable_serverapp(config=jp_server_config, argv=jp_argv) @@ -482,24 +519,13 @@ def inner(nbpath): @pytest.fixture(autouse=True) -def jp_server_cleanup(io_loop): +def jp_server_cleanup(asyncio_loop): yield app: ServerApp = ServerApp.instance() - loop = io_loop.asyncio_loop - loop.run_until_complete(app._cleanup()) + asyncio_loop.run_until_complete(app._cleanup()) ServerApp.clear_instance() -@pytest.fixture -def jp_cleanup_subprocesses(jp_serverapp): - """DEPRECATED: The jp_server_cleanup fixture automatically cleans up the singleton ServerApp class""" - - async def _(): - pass - - return _ - - @pytest.fixture def send_request(jp_fetch, jp_ws_fetch): """Send to Jupyter Server and return response code.""" diff --git a/jupyter_server/serverapp.py b/jupyter_server/serverapp.py index 42d432284e..9e59c08533 100644 --- a/jupyter_server/serverapp.py +++ b/jupyter_server/serverapp.py @@ -36,8 +36,9 @@ from jinja2 import Environment, FileSystemLoader from jupyter_core.paths import secure_write +from jupyter_server.services.kernels.handlers import ZMQChannelsHandler from jupyter_server.transutils import _i18n, trans -from jupyter_server.utils import pathname2url, run_sync_in_loop, urljoin +from jupyter_server.utils import ensure_async, pathname2url, urljoin # the minimum viable tornado version: needs to be kept in sync with setup.py MIN_TORNADO = (6, 1, 0) @@ -2358,7 +2359,11 @@ def init_httpserver(self): 
max_buffer_size=self.max_buffer_size, ) - success = self._bind_http_server() + # binding sockets must be called from inside an event loop + self.io_loop.add_callback(self._bind_http_server) + + def _bind_http_server(self): + success = self._bind_http_server_unix() if self.sock else self._bind_http_server_tcp() if not success: self.log.critical( _i18n( @@ -2368,9 +2373,6 @@ def init_httpserver(self): ) self.exit(1) - def _bind_http_server(self): - return self._bind_http_server_unix() if self.sock else self._bind_http_server_tcp() - def _bind_http_server_unix(self): if unix_socket_in_use(self.sock): self.log.warning(_i18n("The socket %s is already in use.") % self.sock) @@ -2499,6 +2501,10 @@ def initialize( self._preferred_dir_validation(self.preferred_dir, self.root_dir) if self._dispatching: return + # initialize io loop as early as possible, + # so configurables, extensions may reference the event loop + self.init_ioloop() + # Then, use extensions' config loading mechanism to # update config. ServerApp config takes precedence. 
if find_extensions: @@ -2524,7 +2530,6 @@ def initialize( self.init_components() self.init_webapp() self.init_signal() - self.init_ioloop() self.load_server_extensions() self.init_mime_overrides() self.init_shutdown_no_activity() @@ -2544,7 +2549,7 @@ async def cleanup_kernels(self): "Shutting down %d kernel", "Shutting down %d kernels", n_kernels ) self.log.info(kernel_msg % n_kernels) - await run_sync_in_loop(self.kernel_manager.shutdown_all()) + await ensure_async(self.kernel_manager.shutdown_all()) async def cleanup_extensions(self): """Call shutdown hooks in all extensions.""" @@ -2555,7 +2560,7 @@ async def cleanup_extensions(self): "Shutting down %d extension", "Shutting down %d extensions", n_extensions ) self.log.info(extension_msg % n_extensions) - await run_sync_in_loop(self.extension_manager.stop_all_extensions()) + await ensure_async(self.extension_manager.stop_all_extensions()) def running_server_info(self, kernel_count=True): "Return the current working directory and the server url information" @@ -2829,8 +2834,14 @@ async def _cleanup(self): self.remove_browser_open_files() await self.cleanup_extensions() await self.cleanup_kernels() + await ZMQChannelsHandler.close_all() + if getattr(self, "kernel_manager", None): + self.kernel_manager.__del__() if getattr(self, "session_manager", None): self.session_manager.close() + if hasattr(self, "http_server"): + # Stop a server if its set. + self.http_server.stop() def start_ioloop(self): """Start the IO Loop.""" @@ -2864,7 +2875,7 @@ async def _stop(self): def stop(self, from_signal=False): """Cleanup resources and stop the server.""" - if hasattr(self, "_http_server"): + if hasattr(self, "http_server"): # Stop a server if its set. 
self.http_server.stop() if getattr(self, "io_loop", None): diff --git a/jupyter_server/services/kernels/handlers.py b/jupyter_server/services/kernels/handlers.py index d932dfbc6c..32c2e80eee 100644 --- a/jupyter_server/services/kernels/handlers.py +++ b/jupyter_server/services/kernels/handlers.py @@ -6,8 +6,10 @@ # Distributed under the terms of the Modified BSD License. import asyncio import json +import weakref from textwrap import dedent from traceback import format_tb +from typing import MutableSet from jupyter_client import protocol_version as client_protocol_version @@ -128,10 +130,17 @@ class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): # allows checking for conflict on session-id, # which is used as a zmq identity and must be unique. _open_sessions: dict = {} + _open_sockets: MutableSet["ZMQChannelsHandler"] = weakref.WeakSet() _kernel_info_future: Future _close_future: Future + @classmethod + async def close_all(cls): + """Tornado does not provide a way to close open sockets, so add one.""" + for socket in list(cls._open_sockets): + await socket.close() + @property def kernel_info_timeout(self): km_default = self.kernel_manager.kernel_info_timeout @@ -500,7 +509,7 @@ def subscribe(value): stream.on_recv_stream(self._on_zmq_reply) connected.add_done_callback(subscribe) - + ZMQChannelsHandler._open_sockets.add(self) return connected def on_message(self, ws_msg): @@ -740,6 +749,7 @@ def on_close(self): # start buffering instead of closing if this was the last connection if km._kernel_connections[self.kernel_id] == 0: km.start_buffering(self.kernel_id, self.session_key, self.channels) + ZMQChannelsHandler._open_sockets.remove(self) self._close_future.set_result(None) return @@ -752,7 +762,11 @@ def on_close(self): stream.close() self.channels = {} - self._close_future.set_result(None) + try: + ZMQChannelsHandler._open_sockets.remove(self) + self._close_future.set_result(None) + except Exception: + pass def _send_status_message(self, status): iopub = 
self.channels.get("iopub", None) diff --git a/jupyter_server/services/kernels/kernelmanager.py b/jupyter_server/services/kernels/kernelmanager.py index 6cf9dfc4c0..e280959be3 100644 --- a/jupyter_server/services/kernels/kernelmanager.py +++ b/jupyter_server/services/kernels/kernelmanager.py @@ -194,7 +194,7 @@ async def _remove_kernel_when_ready(self, kernel_id, kernel_awaitable): self._kernel_connections.pop(kernel_id, None) self._kernel_ports.pop(kernel_id, None) - async def start_kernel(self, kernel_id=None, path=None, **kwargs): + async def _async_start_kernel(self, kernel_id=None, path=None, **kwargs): """Start a kernel for a session and return its kernel_id. Parameters @@ -215,7 +215,7 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): kwargs["cwd"] = self.cwd_for_path(path) if kernel_id is not None: kwargs["kernel_id"] = kernel_id - kernel_id = await ensure_async(self.pinned_superclass.start_kernel(self, **kwargs)) + kernel_id = await self.pinned_superclass._async_start_kernel(self, **kwargs) self._kernel_connections[kernel_id] = 0 task = asyncio.create_task(self._finish_kernel_start(kernel_id)) if not getattr(self, "use_pending_kernels", None): @@ -243,6 +243,8 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): return kernel_id + start_kernel = _async_start_kernel + async def _finish_kernel_start(self, kernel_id): km = self.get_kernel(kernel_id) if hasattr(km, "ready"): @@ -390,7 +392,7 @@ def stop_buffering(self, kernel_id): buffer_info["session_key"], ) - def shutdown_kernel(self, kernel_id, now=False, restart=False): + async def _async_shutdown_kernel(self, kernel_id, now=False, restart=False): """Shutdown a kernel by kernel_id""" self._check_kernel_id(kernel_id) @@ -401,16 +403,20 @@ def shutdown_kernel(self, kernel_id, now=False, restart=False): if kernel_id in self._pending_kernel_tasks: task = self._pending_kernel_tasks.pop(kernel_id) task.cancel() - else: - self.stop_watching_activity(kernel_id) - 
self.stop_buffering(kernel_id) - self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) + self.stop_watching_activity(kernel_id) + self.stop_buffering(kernel_id) + + return await self.pinned_superclass._async_shutdown_kernel( + self, kernel_id, now=now, restart=restart + ) - async def restart_kernel(self, kernel_id, now=False): + shutdown_kernel = _async_shutdown_kernel + + async def _async_restart_kernel(self, kernel_id, now=False): """Restart a kernel by kernel_id""" self._check_kernel_id(kernel_id) - await ensure_async(self.pinned_superclass.restart_kernel(self, kernel_id, now=now)) + await self.pinned_superclass._async_restart_kernel(self, kernel_id, now=now) kernel = self.get_kernel(kernel_id) # return a Future that will resolve when the kernel has successfully restarted channel = kernel.connect_shell() @@ -422,6 +428,7 @@ def finish(): channel.close() loop.remove_timeout(timeout) kernel.remove_restart_callback(on_restart_failed, "dead") + kernel._pending_restart_cleanup = None def on_reply(msg): self.log.debug("Kernel info reply received: %s", kernel_id) @@ -442,6 +449,7 @@ def on_restart_failed(): future.set_exception(RuntimeError("Restart failed")) kernel.add_restart_callback(on_restart_failed, "dead") + kernel._pending_restart_cleanup = finish kernel.session.send(channel, "kernel_info_request") channel.on_recv(on_reply) loop = IOLoop.current() @@ -452,6 +460,8 @@ def on_restart_failed(): self.start_watching_activity(kernel_id) return future + restart_kernel = _async_restart_kernel + def notify_connect(self, kernel_id): """Notice a new connection to a kernel""" if kernel_id in self._kernel_connections: @@ -547,6 +557,8 @@ def stop_watching_activity(self, kernel_id): if not kernel._activity_stream.socket.closed: kernel._activity_stream.close() kernel._activity_stream = None + if getattr(kernel, "_pending_restart_cleanup", None): + kernel._pending_restart_cleanup() def initialize_culler(self): """Start idle culler if 
'cull_idle_timeout' is greater than zero. @@ -647,27 +659,7 @@ def _default_kernel_manager_class(self): return "jupyter_client.ioloop.AsyncIOLoopKernelManager" def __init__(self, **kwargs): - self.pinned_superclass = AsyncMultiKernelManager + self.pinned_superclass = MultiKernelManager + self._pending_kernel_tasks = {} self.pinned_superclass.__init__(self, **kwargs) self.last_kernel_activity = utcnow() - self._pending_kernel_tasks = {} - - async def shutdown_kernel(self, kernel_id, now=False, restart=False): - """Shutdown a kernel by kernel_id""" - self._check_kernel_id(kernel_id) - - # Decrease the metric of number of kernels - # running for the relevant kernel type by 1 - KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec() - - if kernel_id in self._pending_kernel_tasks: - task = self._pending_kernel_tasks.pop(kernel_id) - task.cancel() - else: - self.stop_watching_activity(kernel_id) - self.stop_buffering(kernel_id) - - # Finish shutting down the kernel before clearing state to avoid a race condition. - return await self.pinned_superclass.shutdown_kernel( - self, kernel_id, now=now, restart=restart - ) diff --git a/jupyter_server/utils.py b/jupyter_server/utils.py index 039749482e..5823321e7a 100644 --- a/jupyter_server/utils.py +++ b/jupyter_server/utils.py @@ -1,13 +1,13 @@ """Notebook related utilities""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import asyncio import errno import importlib.util import inspect import os import socket import sys +import warnings from contextlib import contextmanager from urllib.parse import urljoin # noqa: F401 from urllib.parse import SplitResult, quote, unquote, urlparse, urlsplit, urlunsplit @@ -190,73 +190,12 @@ async def ensure_async(obj): return obj -def run_sync(maybe_async): - """If async, runs maybe_async and blocks until it has executed, - possibly creating an event loop. 
- If not async, just returns maybe_async as it is the result of something - that has already executed. - - Parameters - ---------- - maybe_async : async or non-async object - The object to be executed, if it is async. - - Returns - ------- - result - Whatever the async object returns, or the object itself. - """ - if not inspect.isawaitable(maybe_async): - # that was not something async, just return it - return maybe_async - # it is async, we need to run it in an event loop - - def wrapped(): - create_new_event_loop = False - try: - loop = asyncio.get_event_loop() - except RuntimeError: - create_new_event_loop = True - else: - if loop.is_closed(): - create_new_event_loop = True - if create_new_event_loop: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - try: - result = loop.run_until_complete(maybe_async) - except RuntimeError as e: - if str(e) == "This event loop is already running": - # just return a Future, hoping that it will be awaited - result = asyncio.ensure_future(maybe_async) - else: - raise e - return result - - return wrapped() - - async def run_sync_in_loop(maybe_async): - """Runs a function synchronously whether it is an async function or not. - - If async, runs maybe_async and blocks until it has executed. - - If not async, just returns maybe_async as it is the result of something - that has already executed. - - Parameters - ---------- - maybe_async : async or non-async object - The object to be executed, if it is async. - - Returns - ------- - result - Whatever the async object returns, or the object itself. 
- """ - if not inspect.isawaitable(maybe_async): - return maybe_async - return await maybe_async + """**DEPRECATED**: Use ``ensure_async`` instead.""" + warnings.warn( + "run_sync_in_loop is deprecated, use 'ensure_async'", DeprecationWarning, stacklevel=2 + ) + return ensure_async(maybe_async) def urlencode_unix_socket_path(socket_path): diff --git a/pyproject.toml b/pyproject.toml index aec6ef9505..a55d1a0860 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,12 +20,12 @@ classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ] -requires-python = ">=3.7" +requires-python = ">=3.8" dependencies = [ "anyio>=3.1.0,<4", "argon2-cffi", @@ -134,18 +134,14 @@ addopts = "-raXs --durations 10 --color=yes --doctest-modules" testpaths = [ "tests/" ] -timeout = 300 +timeout = 100 # Restore this setting to debug failures # timeout_method = "thread" filterwarnings = [ "error", - "module:make_current is deprecated:DeprecationWarning", - "module:clear_current is deprecated:DeprecationWarning", - "module:There is no current event loop:DeprecationWarning", "ignore:Passing a schema to Validator.iter_errors:DeprecationWarning", - "ignore:unclosed