Skip to content

Commit

Permalink
Merge pull request #379 from KulwantSinghYadav/blog_writer_human_in_t…
Browse files Browse the repository at this point in the history
…he_loop

Add human in the loop for blog writer demo
  • Loading branch information
20001LastOrder authored May 23, 2024
2 parents 70f9900 + 0795538 commit 1165aa9
Show file tree
Hide file tree
Showing 3 changed files with 58 additions and 10 deletions.
9 changes: 8 additions & 1 deletion demo/blog_writer/agent_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ google_search:
role_description: Act as a question answering agent
task: Question answering
llm: ${llm}
include_metadata: true
config: ${agent_config}

citation_validation: # The tool used to validate and add citation to the answer
Expand All @@ -49,3 +48,11 @@ qa_agent:
# - ${doc_search}
validations:
- ${citation_validation}

#configurations for the user agent to bring human in the loop
user:
_target_: sherpa_ai.agents.user.UserAgent
shared_memory: ${shared_memory}
name: Human Expert
description: Review the paragraph provided by qa agent.
user_id: 1
49 changes: 46 additions & 3 deletions demo/blog_writer/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from hydra.utils import instantiate
from omegaconf import OmegaConf
from sherpa_ai.agents import QAAgent
from sherpa_ai.agents import QAAgent, UserAgent
from sherpa_ai.events import EventType

from outliner import Outliner
Expand Down Expand Up @@ -32,13 +32,37 @@ def get_qa_agent_from_config_file(
return qa_agent



def get_user_agent_from_config_file(
    config_path: str,
) -> UserAgent:
    """
    Create a UserAgent (the human-in-the-loop reviewer) from a config file.

    Args:
        config_path: Path to the OmegaConf YAML config file; must contain a
            ``user`` section targeting ``sherpa_ai.agents.user.UserAgent``.

    Returns:
        UserAgent: The instantiated user agent described by the config's
        ``user`` section.
    """
    config = OmegaConf.load(config_path)

    # Only the `user` section is needed here; the `agent_config`
    # instantiation present in get_qa_agent_from_config_file is not
    # used by the user agent, so it is intentionally omitted.
    user: UserAgent = instantiate(config.user)

    return user



if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--config", type=str, default="agent_config.yaml")
parser.add_argument("--transcript", type=str, default="transcript.txt")
args = parser.parse_args()

writer_agent = get_qa_agent_from_config_file(args.config)
reviewer_agent = get_user_agent_from_config_file(args.config)

outliner = Outliner(args.transcript)
blueprint = outliner.full_transcript2outline_json(verbose=True)
Expand All @@ -52,8 +76,8 @@ def get_qa_agent_from_config_file(
with open("blueprint.json", "w") as f:
f.write(pure_json_str)

# with open("blueprint_manual.json", "r") as f:
# pure_json_str = f.read()
#with open("blueprint.json", "r") as f:
# pure_json_str = f.read()

parsed_json = json.loads(pure_json_str)

Expand All @@ -67,6 +91,25 @@ def get_qa_agent_from_config_file(
for evidence in evidences:
writer_agent.shared_memory.add(EventType.task, "human", evidence)
result = writer_agent.run()


reviewer_input= "\n" + "Please review the paragraph generated below. Type 'yes', 'y' or simply press Enter \
if everything looks good. Else provide feedback on how you would like the paragraph modified." \
+ "\n\n" + result
reviewer_agent.shared_memory.add(EventType.task, "human", reviewer_input)

decision = reviewer_agent.run()
decision_event= reviewer_agent.shared_memory.get_by_type(EventType.result)
decision_content=decision_event[-1].content

if decision_content == []:
break
#
if decision_content.lower() in ["yes", "y", ""]:
pass
else:
writer_agent.shared_memory.add(EventType.task, "human", decision_content)
result = writer_agent.run()
# writer_agent.belief = Belief()
blog += f"{result}\n"

Expand Down
10 changes: 4 additions & 6 deletions demo/blog_writer/outliner.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,15 @@

import tiktoken
from langchain.chat_models import ChatOpenAI
from langchain.prompts.chat import (
ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
)
from langchain.prompts.chat import (ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate)
from langchain.text_splitter import MarkdownTextSplitter


class Outliner:
def __init__(self, transcript_file) -> None:
with open(transcript_file, "r") as f:
with open(transcript_file, "r", encoding="utf-8") as f:
self.raw_transcript = f.read()
# instantiate chat model
self.chat = ChatOpenAI(
Expand Down

0 comments on commit 1165aa9

Please sign in to comment.