From 323f27b9048713cdbab31995265975842a937167 Mon Sep 17 00:00:00 2001
From: Cyrus Leung
Date: Tue, 7 May 2024 00:31:05 +0800
Subject: [PATCH] [Bugfix] Fix `asyncio.Task` not being subscriptable (#4623)

---
 vllm/engine/async_llm_engine.py       | 6 +++---
 vllm/entrypoints/openai/api_server.py | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/vllm/engine/async_llm_engine.py b/vllm/engine/async_llm_engine.py
index 9f72a0d11974f..37a2dc77a3b50 100644
--- a/vllm/engine/async_llm_engine.py
+++ b/vllm/engine/async_llm_engine.py
@@ -1,8 +1,8 @@
 import asyncio
 import time
 from functools import partial
-from typing import (Any, AsyncIterator, Callable, Dict, Iterable, List,
-                    Optional, Set, Tuple, Type, Union)
+from typing import (AsyncIterator, Callable, Dict, Iterable, List, Optional,
+                    Set, Tuple, Type, Union)
 
 from transformers import PreTrainedTokenizer
 
@@ -327,7 +327,7 @@ def __init__(self,
         # We need to keep a reference to unshielded
         # task as well to prevent it from being garbage
         # collected
-        self._background_loop_unshielded: Optional[asyncio.Task[Any]] = None
+        self._background_loop_unshielded: Optional[asyncio.Task] = None
         self.start_engine_loop = start_engine_loop
         self._errored_with: Optional[BaseException] = None
 
diff --git a/vllm/entrypoints/openai/api_server.py b/vllm/entrypoints/openai/api_server.py
index f9e294af47253..44a946f2e32d4 100644
--- a/vllm/entrypoints/openai/api_server.py
+++ b/vllm/entrypoints/openai/api_server.py
@@ -4,7 +4,7 @@
 import re
 from contextlib import asynccontextmanager
 from http import HTTPStatus
-from typing import Any, Set
+from typing import Set
 
 import fastapi
 import uvicorn
@@ -34,7 +34,7 @@ openai_serving_completion: OpenAIServingCompletion
 
 logger = init_logger(__name__)
 
-_running_tasks: Set[asyncio.Task[Any]] = set()
+_running_tasks: Set[asyncio.Task] = set()
 
 
 @asynccontextmanager
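
Note (not part of the patch): a minimal standalone sketch of the failure mode this commit addresses. `asyncio.Task` only gained `__class_getitem__` in Python 3.9, and annotations on module-level variables and on attribute targets such as `self._task` are evaluated at runtime, so writing `asyncio.Task[Any]` in those positions raises `TypeError: 'type' object is not subscriptable` on Python 3.8. The un-subscripted annotation used in the patch is valid on every version. The `BackgroundLoop` class and `_running_tasks` set below are hypothetical illustrations, not vLLM code.

    import asyncio
    from typing import Optional, Set

    # Module-level annotations are evaluated at import time, so on Python 3.8
    # `Set[asyncio.Task[Any]]` would raise "TypeError: 'type' object is not
    # subscriptable" here; the un-subscripted form works on 3.8 and later.
    _running_tasks: Set[asyncio.Task] = set()


    class BackgroundLoop:  # hypothetical example class, not vLLM code
        def __init__(self) -> None:
            # Annotations on attribute targets are evaluated at runtime too,
            # so the same restriction applies inside __init__.
            self._task: Optional[asyncio.Task] = None

        async def run(self) -> None:
            # Keep strong references so the task is not garbage collected.
            self._task = asyncio.create_task(asyncio.sleep(0))
            _running_tasks.add(self._task)
            self._task.add_done_callback(_running_tasks.discard)
            await self._task


    if __name__ == "__main__":
        asyncio.run(BackgroundLoop().run())

An alternative on Python 3.8 would be quoting the annotations or enabling `from __future__ import annotations` to defer their evaluation; dropping the unused `Any` subscript, as the patch does, is the smaller change.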