Feature/enable intuitive logs summarization (Significant-Gravitas#3697)
waynehamadi authored May 3, 2023
1 parent 26c6cfe commit e21917c
Showing 3 changed files with 29 additions and 11 deletions.
17 changes: 8 additions & 9 deletions autogpt/llm/chat.py
@@ -8,15 +8,8 @@
 from autogpt.llm.base import Message
 from autogpt.llm.llm_utils import create_chat_completion
 from autogpt.llm.token_counter import count_message_tokens
-from autogpt.log_cycle.log_cycle import PROMPT_NEXT_ACTION_FILE_NAME
+from autogpt.log_cycle.log_cycle import CURRENT_CONTEXT_FILE_NAME
 from autogpt.logs import logger
-from autogpt.memory_management.store_memory import (
-    save_memory_trimmed_from_context_window,
-)
-from autogpt.memory_management.summary_memory import (
-    get_newly_trimmed_messages,
-    update_running_summary,
-)
 
 cfg = Config()
 
@@ -153,6 +146,10 @@ def chat_with_ai(
 
             # Move to the next most recent message in the full message history
             next_message_to_add_index -= 1
+        from autogpt.memory_management.summary_memory import (
+            get_newly_trimmed_messages,
+            update_running_summary,
+        )
 
         # Insert Memories
         if len(full_message_history) > 0:
@@ -164,7 +161,9 @@ def chat_with_ai(
                 current_context=current_context,
                 last_memory_index=agent.last_memory_index,
             )
+
             agent.summary_memory = update_running_summary(
+                agent,
                 current_memory=agent.summary_memory,
                 new_events=newly_trimmed_messages,
             )
@@ -237,7 +236,7 @@ def chat_with_ai(
             agent.created_at,
             agent.cycle_count,
             current_context,
-            PROMPT_NEXT_ACTION_FILE_NAME,
+            CURRENT_CONTEXT_FILE_NAME,
         )
 
         # TODO: use a model defined elsewhere, so that model can contain
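
Read together, the chat.py hunks amount to the call site sketched below. This is assembled from the diff, not a verbatim excerpt: the summary-memory helpers are now imported inside chat_with_ai rather than at module level, and update_running_summary is passed the agent so it can write to the log cycle. full_message_history, current_context, and agent come from the surrounding function.

from autogpt.memory_management.summary_memory import (
    get_newly_trimmed_messages,
    update_running_summary,
)

# Insert Memories
if len(full_message_history) > 0:
    # Find the messages that fell out of the context window since the last cycle ...
    newly_trimmed_messages, agent.last_memory_index = get_newly_trimmed_messages(
        full_message_history=full_message_history,
        current_context=current_context,
        last_memory_index=agent.last_memory_index,
    )

    # ... and fold them into the running summary. Passing agent is what lets
    # the helper log its prompt and result for this cycle.
    agent.summary_memory = update_running_summary(
        agent,
        current_memory=agent.summary_memory,
        new_events=newly_trimmed_messages,
    )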
4 changes: 3 additions & 1 deletion autogpt/log_cycle/log_cycle.py
@@ -6,8 +6,10 @@
 
 DEFAULT_PREFIX = "agent"
 FULL_MESSAGE_HISTORY_FILE_NAME = "full_message_history.json"
-PROMPT_NEXT_ACTION_FILE_NAME = "prompt_next_action.json"
+CURRENT_CONTEXT_FILE_NAME = "current_context.json"
 NEXT_ACTION_FILE_NAME = "next_action.json"
+PROMPT_SUMMARY_FILE_NAME = "prompt_summary.json"
+SUMMARY_FILE_NAME = "summary.txt"
 
 
 class LogCycleHandler:
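
The renamed and added constants are consumed through LogCycleHandler.log_cycle. Its call shape, as it appears in the summary_memory.py hunk below, is roughly the following; data stands in for whatever payload is being written, and the per-argument notes are editorial glosses rather than the handler's own documentation.

agent.log_cycle_handler.log_cycle(
    agent.config.ai_name,      # name of the AI this run belongs to
    agent.created_at,          # timestamp identifying the run
    agent.cycle_count,         # current think/act cycle number
    data,                      # payload to persist, e.g. messages or summary text
    PROMPT_SUMMARY_FILE_NAME,  # one of the file-name constants above
)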
19 changes: 18 additions & 1 deletion autogpt/memory_management/summary_memory.py
@@ -2,8 +2,10 @@
 import json
 from typing import Dict, List, Tuple
 
+from autogpt.agent import Agent
 from autogpt.config import Config
 from autogpt.llm.llm_utils import create_chat_completion
+from autogpt.log_cycle.log_cycle import PROMPT_SUMMARY_FILE_NAME, SUMMARY_FILE_NAME
 
 cfg = Config()
 
@@ -46,7 +48,7 @@ def get_newly_trimmed_messages(
 
 
 def update_running_summary(
-    current_memory: str, new_events: List[Dict[str, str]]
+    agent: Agent, current_memory: str, new_events: List[Dict[str, str]]
 ) -> str:
     """
     This function takes a list of dictionaries representing new events and combines them with the current summary,
@@ -110,9 +112,24 @@ def update_running_summary(
             "content": prompt,
         }
     ]
+    agent.log_cycle_handler.log_cycle(
+        agent.config.ai_name,
+        agent.created_at,
+        agent.cycle_count,
+        messages,
+        PROMPT_SUMMARY_FILE_NAME,
+    )
 
     current_memory = create_chat_completion(messages, cfg.fast_llm_model)
 
+    agent.log_cycle_handler.log_cycle(
+        agent.config.ai_name,
+        agent.created_at,
+        agent.cycle_count,
+        current_memory,
+        SUMMARY_FILE_NAME,
+    )
+
     message_to_return = {
         "role": "system",
         "content": f"This reminds you of these events from your past: \n{current_memory}",
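
Seen from the helper's side, the inputs and output now look roughly like this; the values are invented for illustration, and only the shapes follow the signature and the message_to_return construction shown above.

# Invented example data; shapes follow the List[Dict[str, str]] annotation and
# the message_to_return construction in the hunk above.
new_events = [
    {"role": "assistant", "content": "Ran command 'browse_website' on the project README."},
    {"role": "system", "content": "Command returned: setup instructions ..."},
]
summary_message = update_running_summary(agent, current_memory="", new_events=new_events)
# summary_message == {
#     "role": "system",
#     "content": "This reminds you of these events from your past: \n<new summary>",
# }
# Side effect per call: prompt_summary.json and summary.txt are written through
# agent.log_cycle_handler for the current cycle.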
