feat: add example notebooks #1625

Merged 1 commit on Aug 10, 2024
275 changes: 275 additions & 0 deletions examples/notebooks/multi_agent.ipynb
@@ -0,0 +1,275 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 31,
"id": "78fb59cf-89fd-4b30-8a1c-d1ae4bfd3daf",
"metadata": {},
"outputs": [],
"source": [
"from memgpt import create_client, Admin\n",
"from memgpt.client.client import LocalClient, RESTClient "
]
},
{
"cell_type": "code",
"execution_count": 32,
"id": "9269eda2-3108-4955-86ab-b406d51f562a",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"UUID('00000000-0000-0000-0000-000000000000')"
]
},
"execution_count": 32,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"client = create_client() \n",
"client.user_id"
]
},
{
"cell_type": "code",
"execution_count": 33,
"id": "879710d4-21c7-43ec-8d00-73e618f55693",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"ListModelsResponse(models=[LLMConfigModel(model='gpt-4o-mini', model_endpoint_type='openai', model_endpoint='https://api.openai.com/v1', model_wrapper=None, context_window=8192)])"
]
},
"execution_count": 33,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"client.list_models()"
]
},
{
"cell_type": "markdown",
"id": "af6ea8eb-fc6b-4de5-ae79-c2b684a81f17",
"metadata": {},
"source": [
"## Create a key from the Admin portal \n",
"(This is to allow viewing agents on the dev portal) "
]
},
{
"cell_type": "code",
"execution_count": 35,
"id": "715fa669-3fc6-4579-96a9-c4a730f43e29",
"metadata": {},
"outputs": [],
"source": [
"admin_client = Admin(base_url=\"http://localhost:8283\", token=\"memgptadmin\")"
]
},
{
"cell_type": "code",
"execution_count": 36,
"id": "1782934f-7884-4ee7-ad09-5ae33efa3b2e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"CreateAPIKeyResponse(api_key='sk-45cc3e1fd35a3fac3a2ad959fc23877a0476181e8b0a5557')"
]
},
"execution_count": 36,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"admin_client.create_key(user_id=client.user_id, key_name=\"key\")"
]
},
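{
"cell_type": "markdown",
"id": "2e7c1a9b-4d3f-4b8a-9c6e-5f0d1b2a3c4d",
"metadata": {},
"source": [
"The first cell imports `RESTClient` but the notebook never uses it. As an optional aside, a key like the one above could also be used to talk to the same server over REST. The cell below is only a sketch: it assumes a `RESTClient(base_url=..., token=...)` constructor from `memgpt.client.client`, and the key name `rest-key` is arbitrary."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7f2b9c4e-1a5d-4e6f-8b0c-3d9a2e1f4b5c",
"metadata": {},
"outputs": [],
"source": [
"# Sketch only: connect over REST with a freshly generated key.\n",
"# Assumes RESTClient(base_url=..., token=...); adjust to the installed MemGPT version.\n",
"key_response = admin_client.create_key(user_id=client.user_id, key_name=\"rest-key\")\n",
"rest_client = RESTClient(base_url=\"http://localhost:8283\", token=key_response.api_key)\n",
"rest_client.list_agents()"
]
},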
{
"cell_type": "code",
"execution_count": 37,
"id": "b29bac8d-2a15-45de-a60d-6d94275443f5",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"MemGPT.memgpt.server.server - INFO - Created new agent from config: <memgpt.agent.Agent object at 0x14e542600>\n"
]
}
],
"source": [
"agent_state = client.create_agent()"
]
},
{
"cell_type": "markdown",
"id": "5fbc43c8-9536-4107-a64d-6e702083242b",
"metadata": {},
"source": [
"## Create an agent "
]
},
{
"cell_type": "code",
"execution_count": 38,
"id": "f0a388b5-2d00-4f3e-8a5b-b768da02ac8e",
"metadata": {},
"outputs": [],
"source": [
"def read_resume(self, name: str): \n",
" \"\"\"\n",
" Read the resume data for a candidate given the name\n",
"\n",
" Args: \n",
" name (str): Candidate name \n",
"\n",
" Returns: \n",
" resume_data (str): Candidate's resume data \n",
" \"\"\"\n",
" import os\n",
" filepath = os.path.join(\"data\", \"resumes\", name.lower().replace(\" \", \"_\") + \".txt\")\n",
" #print(\"read\", filepath)\n",
" return open(filepath).read()\n",
"\n",
"def submit_candidate_for_outreach(self, candidate_name: str, resume: str, justification: str): \n",
" \"\"\"\n",
" Submit a candidate for outreach. \n",
"\n",
" Args: \n",
" candidate_name (str): The name of the candidate\n",
" resume (str): The text representation of the candidate's resume \n",
" justification (str): Summary reason for why the candidate is good and should be reached out to\n",
" \"\"\"\n",
" from memgpt import create_client \n",
" client = create_client()\n",
" message = \"Reach out to the following candidate. \" \\\n",
" + f\"Name: {candidate_name}\\n\" \\\n",
" + f\"Resume Data: {resume}\\n\" \\\n",
" + f\"Justification: {justification}\"\n",
" # NOTE: we will define this agent later \n",
" #print(\"submit for outreach\", message)\n",
" response = client.send_message(agent_name=\"outreach_agent\", role=\"user\", message=message) # TODO: implement this\n",
" #print(respose.messages)\n",
"\n",
"# TODO: add an archival andidate tool (provide justification) \n",
"\n",
"read_resume_tool = client.create_tool(read_resume) \n",
"submit_candidate_tool = client.create_tool(submit_candidate_for_outreach)"
]
},
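{
"cell_type": "markdown",
"id": "6a1d4f2c-8b3e-4c7a-9d0f-1e2b3c4d5e6f",
"metadata": {},
"source": [
"`read_resume` looks for plain-text resumes under `data/resumes/<name>.txt`, and no resume files ship with this notebook. The cell below creates a placeholder so the tool can be exercised; the candidate name and resume text are invented for illustration only."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9c8b7a6d-5e4f-4d3c-8b1a-0f9e8d7c6b5a",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"\n",
"# Create a placeholder resume matching the path read_resume expects.\n",
"os.makedirs(os.path.join(\"data\", \"resumes\"), exist_ok=True)\n",
"with open(os.path.join(\"data\", \"resumes\", \"tony_stark.txt\"), \"w\") as f:\n",
"    f.write(\"Tony Stark: front-end engineer (React, TypeScript), Python, experience with LLMs.\")\n",
"\n",
"# The self parameter is unused in the function body, so None is fine for a local smoke test.\n",
"read_resume(None, \"Tony Stark\")"
]
},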
{
"cell_type": "code",
"execution_count": 39,
"id": "d2b0f66f-6cc3-471f-b2c7-49f51f5bbb7b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"MemGPT.memgpt.server.server - INFO - Created new agent from config: <memgpt.agent.Agent object at 0x14e542600>\n"
]
}
],
"source": [
"from memgpt.memory import ChatMemory\n",
"\n",
"company_description = \"The company is called AgentOS and is building AI tools to make it easier to create and deploy LLM agents.\"\n",
"skills = \"Front-end (React, Typescript), software engineering (ideally Python), and experience with LLMs.\"\n",
"\n",
"\n",
"leadgen_agent = client.create_agent(\n",
" name=\"leadgen_agent\", \n",
" memory=ChatMemory(\n",
" persona=f\"You are responsible to finding good recruiting candidates, for the company description: {company_description}. \" \\\n",
" + f\"Ideal canddiates have skills: {skills}. \" \\\n",
" + \"Search for candidates by calling the `search_candidates_db` function. \" \\\n",
" + \"When you find a good candidate, submit the candidate for outreach with the `submit_candidate_for_outreach` tool. \" \\\n",
" + \"Continue to search through the database until there are no more entries. \",\n",
" human=\"\",\n",
" ), \n",
" tools=[read_resume_tool.name, submit_candidate_tool.name]\n",
")"
]
},
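{
"cell_type": "markdown",
"id": "4b5c6d7e-8f9a-4b0c-9d2e-3f4a5b6c7d8e",
"metadata": {},
"source": [
"`submit_candidate_for_outreach` sends its message to an agent named `outreach_agent`, which this notebook never defines (see the NOTE in the tool body). Below is a minimal sketch of that second agent, reusing the `ChatMemory` pattern from the cell above; the persona wording is an assumption, not part of the original example."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5d6e7f8a-9b0c-4d1e-8f3a-4b5c6d7e8f9a",
"metadata": {},
"outputs": [],
"source": [
"# Sketch of the outreach agent referenced by submit_candidate_for_outreach.\n",
"# The persona text is illustrative; adjust it to the outreach behavior you want.\n",
"outreach_agent = client.create_agent(\n",
"    name=\"outreach_agent\", \n",
"    memory=ChatMemory(\n",
"        persona=f\"You draft a short, friendly outreach message for each candidate you are given, on behalf of the company: {company_description}\",\n",
"        human=\"\",\n",
"    ),\n",
")"
]
},
{
"cell_type": "markdown",
"id": "1a2b3c4d-5e6f-4a7b-8c9d-0e1f2a3b4c5e",
"metadata": {},
"source": [
"With both agents in place, the workflow can be kicked off by messaging the lead-generation agent. The call mirrors the `client.send_message(agent_name=..., role=..., message=...)` shape already used inside `submit_candidate_for_outreach`; the candidate name is a placeholder, and older client versions may expect `agent_id=leadgen_agent.id` instead of `agent_name`."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8e9f0a1b-2c3d-4e5f-aa7b-8c9d0e1f2a3b",
"metadata": {},
"outputs": [],
"source": [
"# Kick off the pipeline with a placeholder candidate name.\n",
"response = client.send_message(\n",
"    agent_name=\"leadgen_agent\",\n",
"    role=\"user\",\n",
"    message=\"Please evaluate the candidate Tony Stark.\",\n",
")\n",
"response.messages"
]
},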
{
"cell_type": "markdown",
"id": "1f489784-dbc9-4c93-9181-457460b05401",
"metadata": {},
"source": [
"## Cleanup "
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "f93c330b-909a-4180-bf6b-166b951977a6",
"metadata": {},
"outputs": [],
"source": [
"agents = client.list_agents()"
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "523a382d-f514-46cb-a902-84ee74706f01",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Deleted FierceNucleus\n",
"Deleted LuxuriousRaccoon\n"
]
}
],
"source": [
"for agent in agents: \n",
" client.delete_agent(agent.id)\n",
" print(\"Deleted\", agent.name)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e7f1a012-0080-4e68-b26f-7d139a37bad0",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "memgpt",
"language": "python",
"name": "memgpt"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
}
},
"nbformat": 4,
"nbformat_minor": 5
}