Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add user proxy docs. Make user proxy's default impl cancellable #4459

Merged
merged 23 commits into from
Dec 6, 2024
Merged
Show file tree
Hide file tree
Changes from 10 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from inspect import iscoroutinefunction
from typing import Awaitable, Callable, List, Optional, Sequence, Union, cast

from aioconsole import ainput # type: ignore
from autogen_core.base import CancellationToken

from ..base import Response
Expand All @@ -14,6 +15,15 @@
InputFuncType = Union[SyncInputFunc, AsyncInputFunc]


# TODO: ainput doesn't seem to play nicely with jupyter.
# No input window appears in this case.
async def cancellable_input(prompt: str, cancellation_token: Optional[CancellationToken]) -> str:
    """Read a line of console input as a cancellable asyncio task.

    Wraps ``aioconsole.ainput`` in a task and, when a token is supplied,
    links the task to it so that cancelling the token cancels the pending
    read instead of blocking forever.

    Args:
        prompt: Text displayed to the user before reading input.
        cancellation_token: Optional token; when cancelled, the pending
            input task is cancelled as well.

    Returns:
        The line entered by the user.

    Raises:
        asyncio.CancelledError: If the token is cancelled before any
            input is received.
    """
    task = asyncio.create_task(ainput(prompt))  # type: ignore
    if cancellation_token is not None:
        # Linking propagates token cancellation to the input task.
        cancellation_token.link_future(task)  # type: ignore
    return await task  # type: ignore


class UserProxyAgent(BaseChatAgent):
peterychang marked this conversation as resolved.
Show resolved Hide resolved
"""An agent that can represent a human user through an input function.

Expand All @@ -40,6 +50,53 @@ class UserProxyAgent(BaseChatAgent):

See `Pause for User Input <https://microsoft.github.io/autogen/dev/user-guide/agentchat-user-guide/tutorial/teams.html#pause-for-user-input>`_ for more information.

Example:
Simple usage case::

import asyncio
from autogen_agentchat.agents import UserProxyAgent
from autogen_agentchat.messages import TextMessage
from autogen_core.base import CancellationToken

token = CancellationToken()
agent = UserProxyAgent("user_proxy")
response = await asyncio.create_task(
    agent.on_messages(
        [TextMessage(content="What is your name? ", source="user")],
        cancellation_token=token,
    )
)
print(f"Your name is {response.chat_message.content}")
peterychang marked this conversation as resolved.
Show resolved Hide resolved

Example:
Cancellable usage case::

import asyncio
from autogen_agentchat.agents import UserProxyAgent
from autogen_agentchat.messages import TextMessage
from autogen_core.base import CancellationToken


async def timeout(delay):
await asyncio.sleep(delay)


def cancellation_callback(task):
token.cancel()


token = CancellationToken()
agent = UserProxyAgent("user_proxy")
try:
timeout_task = asyncio.create_task(timeout(3))
timeout_task.add_done_callback(cancellation_callback)
agent_task = asyncio.create_task(
agent.on_messages(
[TextMessage(content="What is your name? ", source="user")],
cancellation_token=token,
)
)
response = await agent_task
print(f"Your name is {response.chat_message.content}")
except Exception as e:
print(f"Exception: {e}")
except BaseException as e:
print(f"BaseException: {e}")
"""

def __init__(
Expand All @@ -51,7 +108,7 @@ def __init__(
) -> None:
"""Initialize the UserProxyAgent."""
super().__init__(name=name, description=description)
self.input_func = input_func or input
self.input_func = input_func or cancellable_input
peterychang marked this conversation as resolved.
Show resolved Hide resolved
self._is_async = iscoroutinefunction(self.input_func)

@property
Expand Down
peterychang marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import json
import logging
from typing import Any, List, Dict
from .... import TRACE_LOGGER_NAME
from typing import Any, Dict, List

from autogen_core.base import AgentId, CancellationToken, MessageContext
from autogen_core.components import DefaultTopicId, Image, event, rpc
Expand All @@ -12,8 +11,10 @@
UserMessage,
)

from .... import TRACE_LOGGER_NAME
from ....base import Response, TerminationCondition
from ....messages import AgentMessage, MultiModalMessage, StopMessage, TextMessage, ChatMessage
from ....messages import AgentMessage, ChatMessage, MultiModalMessage, StopMessage, TextMessage
from .._base_group_chat_manager import BaseGroupChatManager
from .._events import (
GroupChatAgentResponse,
GroupChatMessage,
Expand All @@ -22,7 +23,6 @@
GroupChatStart,
GroupChatTermination,
)
from .._base_group_chat_manager import BaseGroupChatManager
from ._prompts import (
ORCHESTRATOR_FINAL_ANSWER_PROMPT,
ORCHESTRATOR_PROGRESS_LEDGER_PROMPT,
Expand Down
peterychang marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,48 @@
"as well as a list of inner messages in the {py:attr}`~autogen_agentchat.base.Response.inner_messages` attribute,\n",
"which stores the agent's \"thought process\" that led to the final response.\n",
"\n",
"## User Proxy Agent\n",
"\n",
"{py:class}`~autogen_agentchat.agents.UserProxyAgent` is a built-in agent that\n",
"provides one way for a user to intervene in the process. This agent will put the team in a temporary blocking state, and thus any exceptions or runtime failures while in the blocked state will result in a deadlock. It is strongly advised that this agent be coupled with a timeout mechanism and that all errors and exceptions emanating from it are handled."
peterychang marked this conversation as resolved.
Show resolved Hide resolved
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'TextMessage' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[1], line 6\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mautogen_agentchat\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01magents\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m UserProxyAgent\n\u001b[1;32m 5\u001b[0m user_proxy_agent \u001b[38;5;241m=\u001b[39m UserProxyAgent(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124muser_proxy\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m----> 6\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mawait\u001b[39;00m asyncio\u001b[38;5;241m.\u001b[39mcreate_task(user_proxy_agent\u001b[38;5;241m.\u001b[39mon_messages([\u001b[43mTextMessage\u001b[49m(content\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mWhat is your name? \u001b[39m\u001b[38;5;124m\"\u001b[39m, source\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124muser\u001b[39m\u001b[38;5;124m\"\u001b[39m)], cancellation_token\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m))\n\u001b[1;32m 8\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mYour name is \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresponse\u001b[38;5;241m.\u001b[39mchat_message\u001b[38;5;241m.\u001b[39mcontent\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n",
"\u001b[0;31mNameError\u001b[0m: name 'TextMessage' is not defined"
]
}
],
"source": [
"import asyncio\n",
"\n",
"from autogen_agentchat.agents import UserProxyAgent\n",
"from autogen_agentchat.messages import TextMessage\n",
"\n",
"user_proxy_agent = UserProxyAgent(\"user_proxy\")\n",
"response = await asyncio.create_task(\n",
" user_proxy_agent.on_messages([TextMessage(content=\"What is your name? \", source=\"user\")], cancellation_token=None)\n",
")\n",
"\n",
"print(f\"Your name is {response.chat_message.content}\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The User Proxy agent is ideally used for on-demand human-in-the-loop interactions for scenarios such as Just In Time approvals, human feedback, alerts, etc. For slower user interactions, consider terminating the session using a termination condition and start another one from run or run_stream with another message.\n",
"\n",
"### Stream Messages\n",
"\n",
"We can also stream each message as it is generated by the agent by using the\n",
Expand Down Expand Up @@ -251,7 +293,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
"version": "3.12.3"
}
},
"nbformat": 4,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,59 +95,6 @@
"# Use asyncio.run(run_countdown_agent()) when running in a script.\n",
"await run_countdown_agent()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## UserProxyAgent \n",
"\n",
"A common use case for building a custom agent is to create an agent that acts as a proxy for the user.\n",
"\n",
"In the example below we show how to implement a `UserProxyAgent` - an agent that asks the user to enter\n",
"some text through console and then returns that message as a response."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import asyncio\n",
"from typing import List, Sequence\n",
"\n",
"from autogen_agentchat.agents import BaseChatAgent\n",
"from autogen_agentchat.base import Response\n",
"from autogen_agentchat.messages import ChatMessage\n",
"from autogen_core.base import CancellationToken\n",
"\n",
"\n",
"class UserProxyAgent(BaseChatAgent):\n",
" def __init__(self, name: str) -> None:\n",
" super().__init__(name, \"A human user.\")\n",
"\n",
" @property\n",
" def produced_message_types(self) -> List[type[ChatMessage]]:\n",
" return [TextMessage]\n",
"\n",
" async def on_messages(self, messages: Sequence[ChatMessage], cancellation_token: CancellationToken) -> Response:\n",
" user_input = await asyncio.get_event_loop().run_in_executor(None, input, \"Enter your response: \")\n",
" return Response(chat_message=TextMessage(content=user_input, source=self.name))\n",
"\n",
" async def on_reset(self, cancellation_token: CancellationToken) -> None:\n",
" pass\n",
"\n",
"\n",
"async def run_user_proxy_agent() -> None:\n",
" user_proxy_agent = UserProxyAgent(name=\"user_proxy_agent\")\n",
" response = await user_proxy_agent.on_messages([], CancellationToken())\n",
" print(response.chat_message.content)\n",
"\n",
"\n",
"# Use asyncio.run(run_user_proxy_agent()) when running in a script.\n",
"await run_user_proxy_agent()"
]
}
],
"metadata": {
Expand Down
1 change: 1 addition & 0 deletions python/packages/autogen-core/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ classifiers = [
dependencies = [
"openai>=1.3",
"pillow",
"aioconsole",
peterychang marked this conversation as resolved.
Show resolved Hide resolved
"aiohttp",
"typing-extensions",
"pydantic<3.0.0,>=2.0.0",
Expand Down
11 changes: 11 additions & 0 deletions python/uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading