Cleanup preset configuration #287

Closed
wants to merge 24 commits into from
1 change: 0 additions & 1 deletion README.md
@@ -159,7 +159,6 @@ export AZURE_OPENAI_VERSION = ...
export AZURE_OPENAI_DEPLOYMENT = ...
export AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT = ...
```

Note: your Azure endpoint must support functions or you will get an error. See https://github.com/cpacker/MemGPT/issues/91 for more information.

#### Custom Endpoints
8 changes: 4 additions & 4 deletions memgpt/autogen/memgpt_agent.py
@@ -129,11 +129,11 @@ def create_autogen_memgpt_agent(
)

memgpt_agent = presets.use_preset(
preset,
# preset,
agent_config,
model,
persona_description,
user_description,
# model,
# persona_description,
# user_description,
interface,
persistence_manager,
)
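Under the new `use_preset(agent_config, interface, persistence_manager)` signature introduced in `memgpt/presets.py` below, dropping the commented-out arguments would leave a call like the following sketch (the presumed final form, not code taken from this PR):

```python
# Sketch: the autogen call site once the commented-out arguments are removed.
# The preset, model, persona, and human are now read from agent_config inside use_preset().
memgpt_agent = presets.use_preset(agent_config, interface, persistence_manager)
```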
4 changes: 0 additions & 4 deletions memgpt/cli/cli.py
@@ -137,11 +137,7 @@ def run(

# create agent
memgpt_agent = presets.use_preset(
agent_config.preset,
agent_config,
agent_config.model,
utils.get_persona_text(agent_config.persona),
utils.get_human_text(agent_config.human),
memgpt.interface,
persistence_manager,
)
9 changes: 7 additions & 2 deletions memgpt/cli/cli_config.py
@@ -20,7 +20,7 @@
def configure():
"""Updates default MemGPT configurations"""

from memgpt.presets import DEFAULT_PRESET, preset_options
from memgpt.presets import DEFAULT_PRESET, preset_options, preset_map, SYNC_CHAT

MemGPTConfig.create_config_dir()

@@ -78,7 +78,12 @@ def configure():
default_endpoint = questionary.select("Select default endpoint:", endpoint_options).ask()

# configure preset
default_preset = questionary.select("Select default preset:", preset_options, default=DEFAULT_PRESET).ask()
default_preset = questionary.select(
"Select default preset:",
[preset.pretty_name for preset in preset_map.values() if preset.name != SYNC_CHAT],
default=preset_map[DEFAULT_PRESET].pretty_name,
).ask()
default_preset = [preset.name for preset in preset_map.values() if preset.pretty_name == default_preset][0]

# default model
if use_openai or use_azure:
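The new selection logic maps pretty names back to internal preset names with a list-comprehension lookup. A minimal sketch of the same round-trip using an explicit dict (questionary usage as in the diff; `preset_map`, `SYNC_CHAT`, and `DEFAULT_PRESET` are from `memgpt.presets`):

```python
# Sketch: the same preset selection with an explicit pretty-name -> name dict.
import questionary

from memgpt.presets import DEFAULT_PRESET, SYNC_CHAT, preset_map

pretty_to_name = {
    preset.pretty_name: preset.name
    for preset in preset_map.values()
    if preset.name != SYNC_CHAT  # hide the internal sync preset from the menu
}
choice = questionary.select(
    "Select default preset:",
    list(pretty_to_name),
    default=preset_map[DEFAULT_PRESET].pretty_name,
).ask()
default_preset = pretty_to_name[choice]
```

A dict keyed by pretty name avoids the trailing `[...][0]` indexing and raises a clear `KeyError` if the selected label somehow does not correspond to a preset.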
24 changes: 16 additions & 8 deletions memgpt/main.py
@@ -333,15 +333,23 @@ async def main(
chosen_human = cfg.human_persona
chosen_persona = cfg.memgpt_persona

memgpt_agent = presets.use_preset(
presets.DEFAULT_PRESET,
None, # no agent config to provide
cfg.model,
personas.get_persona_text(*chosen_persona),
humans.get_human_text(*chosen_human),
memgpt.interface,
persistence_manager,
temp_agent_config = AgentConfig(
model=cfg.model,
persona=chosen_persona,
human=chosen_human,
persistence_manager=persistence_manager,
preset=presets.DEFAULT_PRESET,
)
memgpt_agent = presets.use_preset(temp_agent_config, memgpt.interface, persistence_manager)
# memgpt_agent = presets.use_preset(
# presets.DEFAULT_PRESET,
# None, # no agent config to provide
# cfg.model,
# personas.get_persona_text(*chosen_persona),
# humans.get_human_text(*chosen_human),
# memgpt.interface,
# persistence_manager,
# )
print_messages = memgpt.interface.print_messages
await print_messages(memgpt_agent.messages)

156 changes: 73 additions & 83 deletions memgpt/presets.py
@@ -1,41 +1,89 @@
from .prompts import gpt_functions
from .prompts import gpt_system

DEFAULT_PRESET = "memgpt_chat"
preset_options = [DEFAULT_PRESET]
from typing import List, Optional, Tuple
from dataclasses import dataclass, field

DEFAULT_PRESET = "memgpt_chat"
preset_options = [DEFAULT_PRESET] # TODO: eventually remove
SYNC_CHAT = "memgpt_chat_sync" # TODO: remove me after we move the CLI to AgentSync


def use_preset(preset_name, agent_config, model, persona, human, interface, persistence_manager):
@dataclass
class Preset:
name: str = None
pretty_name: str = None
functions: Tuple[str] = ()
sync: bool = False # TODO: remove as a preset


@dataclass
class DefaultPreset(Preset):
Review comment (Collaborator):
I think this refactor is generally the right idea; however, I wonder what the best way is to support users adding their own presets, e.g. #282.

I feel like adding presets should ideally be as easy as adding personas/humans or should have a similar workflow, where you create a text file and put it in a folder.

Thoughts?
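A minimal sketch of what that could look like, reusing the `Preset` dataclass from this PR and assuming presets live as YAML files in a folder; the directory layout, file format, and `load_presets_from_dir` helper are illustrative assumptions, not part of the PR:

```python
# Hypothetical sketch: build the preset map from YAML files in a folder,
# mirroring the existing personas/humans workflow.
import os
from typing import Dict

import yaml  # assumes PyYAML is available

from memgpt.presets import Preset  # dataclass introduced in this PR


def load_presets_from_dir(preset_dir: str) -> Dict[str, Preset]:
    """Read every *.yaml file in preset_dir into a name -> Preset map."""
    preset_map: Dict[str, Preset] = {}
    for fname in sorted(os.listdir(preset_dir)):
        if not fname.endswith(".yaml"):
            continue
        with open(os.path.join(preset_dir, fname)) as f:
            data = yaml.safe_load(f)
        preset = Preset(
            name=data["name"],
            pretty_name=data.get("pretty_name", data["name"]),
            functions=tuple(data.get("functions", [])),
        )
        preset_map[preset.name] = preset
    return preset_map
```

A preset file would then only need a name, a pretty name, and the list of function names to enable, much like a persona or human text file today.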

name: str = DEFAULT_PRESET
pretty_name: str = "Default"
functions: Tuple[str] = (
"send_message",
"pause_heartbeats",
"core_memory_append",
"core_memory_replace",
"conversation_search",
"conversation_search_date",
"archival_memory_insert",
"archival_memory_search",
)


@dataclass
class SyncPreset(DefaultPreset): # TODO: get rid of this
name: str = SYNC_CHAT
pretty_name: str = "Sync Chat"
sync: bool = True


@dataclass
class ExtrasPreset(DefaultPreset):
name: str = "memgpt_extras"
pretty_name: str = "Extras (read/write to file)"

def __init__(self):
self.functions = super().functions + ("read_from_text_file", "append_to_text_file", "http_request")


presets = [DefaultPreset(), ExtrasPreset(), SyncPreset()]
preset_map = {preset.name: preset for preset in presets}


# def use_preset(preset_name, agent_config, model, persona, human, interface, persistence_manager):
def use_preset(agent_config, interface, persistence_manager):
"""Storing combinations of SYSTEM + FUNCTION prompts"""

from memgpt.agent import AgentAsync, Agent
from memgpt.utils import printd

if preset_name == DEFAULT_PRESET:
functions = [
"send_message",
"pause_heartbeats",
"core_memory_append",
"core_memory_replace",
"conversation_search",
"conversation_search_date",
"archival_memory_insert",
"archival_memory_search",
]
available_functions = [v for k, v in gpt_functions.FUNCTIONS_CHAINING.items() if k in functions]
printd(f"Available functions:\n", [x["name"] for x in available_functions])
assert len(functions) == len(available_functions)

if "gpt-3.5" in model:
# use a different system message for gpt-3.5
preset_name = "memgpt_gpt35_extralong"
from memgpt.utils import printd, get_human_text, get_persona_text

# read config values
preset_name = agent_config.preset
model = agent_config.model
persona = get_persona_text(agent_config.persona)
human = get_human_text(agent_config.human)

# setup functions
assert preset_name in preset_map, f"Invalid preset name: {preset_name}"
preset = preset_map[preset_name]
functions = preset.functions
available_functions = [v for k, v in gpt_functions.FUNCTIONS_CHAINING.items() if k in functions]
printd(f"Available functions:\n", [x["name"] for x in available_functions])
assert len(functions) == len(available_functions)

# get system text name
system_text = "memgpt_gpt35_extralong" if "gpt-3.5" in model else preset_name

print(gpt_system.get_system_text(system_text), persona, human)

if preset_name != "memgpt_chat_sync":
return AgentAsync(
config=agent_config,
model=model,
system=gpt_system.get_system_text(preset_name),
system=gpt_system.get_system_text(system_text),
functions=available_functions,
interface=interface,
persistence_manager=persistence_manager,
@@ -44,26 +92,7 @@ def use_preset(preset_name, agent_config, model, persona, human, interface, pers
# gpt-3.5-turbo tends to omit inner monologue, relax this requirement for now
first_message_verify_mono=True if "gpt-4" in model else False,
)

elif preset_name == "memgpt_chat_sync": # TODO: remove me after we move the CLI to AgentSync
functions = [
"send_message",
"pause_heartbeats",
"core_memory_append",
"core_memory_replace",
"conversation_search",
"conversation_search_date",
"archival_memory_insert",
"archival_memory_search",
]
available_functions = [v for k, v in gpt_functions.FUNCTIONS_CHAINING.items() if k in functions]
printd(f"Available functions:\n", [x["name"] for x in available_functions])
assert len(functions) == len(available_functions)

if "gpt-3.5" in model:
# use a different system message for gpt-3.5
preset_name = "memgpt_gpt35_extralong"

else:
return Agent(
config=agent_config,
model=model,
@@ -76,42 +105,3 @@ def use_preset(preset_name, agent_config, model, persona, human, interface, pers
# gpt-3.5-turbo tends to omit inner monologue, relax this requirement for now
first_message_verify_mono=True if "gpt-4" in model else False,
)

elif preset_name == "memgpt_extras":
functions = [
"send_message",
"pause_heartbeats",
"core_memory_append",
"core_memory_replace",
"conversation_search",
"conversation_search_date",
"archival_memory_insert",
"archival_memory_search",
# extra for read/write to files
"read_from_text_file",
"append_to_text_file",
# internet access
"http_request",
]
available_functions = [v for k, v in gpt_functions.FUNCTIONS_CHAINING.items() if k in functions]
printd(f"Available functions:\n", [x["name"] for x in available_functions])
assert len(functions) == len(available_functions)

if "gpt-3.5" in model:
# use a different system message for gpt-3.5
preset_name = "memgpt_gpt35_extralong"

return AgentAsync(
model=model,
system=gpt_system.get_system_text("memgpt_chat"),
functions=available_functions,
interface=interface,
persistence_manager=persistence_manager,
persona_notes=persona,
human_notes=human,
# gpt-3.5-turbo tends to omit inner monologue, relax this requirement for now
first_message_verify_mono=True if "gpt-4" in model else False,
)

else:
raise ValueError(preset_name)
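For reference, a short sketch of how the refactored preset data is meant to be consumed, assuming the `Preset` dataclasses and `preset_map` exactly as defined in this diff:

```python
from memgpt.presets import DEFAULT_PRESET, preset_map

# Each preset now carries its function names as data instead of being
# hard-coded in an if/elif branch inside use_preset().
for name, preset in preset_map.items():
    print(f"{preset.pretty_name} ({name}): {len(preset.functions)} functions")

# The default preset keeps the original eight core functions...
assert "send_message" in preset_map[DEFAULT_PRESET].functions
# ...and the extras preset layers the file and HTTP helpers on top of them.
assert "http_request" in preset_map["memgpt_extras"].functions
```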