Skip to content

Commit

Permalink
Merge pull request #29 from merefield/openai_function_agent
Browse files Browse the repository at this point in the history
FEATURE: Implement Agent Behaviour
  • Loading branch information
merefield authored Aug 18, 2023
2 parents ef57098 + 0524745 commit ed6e90d
Show file tree
Hide file tree
Showing 22 changed files with 798 additions and 109 deletions.
9 changes: 8 additions & 1 deletion app/jobs/regular/chatbot_reply_job.rb
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,14 @@ def execute(opts)
if create_bot_reply
::DiscourseChatbot.progress_debug_message("4. Retrieving new reply message...")
begin
bot = ::DiscourseChatbot::OpenAIBot.new
# agent can only be used currently with 0613 series model
if SiteSetting.chatbot_bot_type == "agent" &&
(["gpt-3.5-turbo-0613", "gpt-4-0613"].include?(SiteSetting.chatbot_open_ai_model) ||
SiteSetting.chatbot_open_ai_model_custom)
bot = ::DiscourseChatbot::OpenAIAgent.new
else
bot = ::DiscourseChatbot::OpenAIBot.new
end
message_body = bot.ask(opts)
rescue => e
Rails.logger.error ("OpenAIBot: There was a problem, but will retry til limit: #{e}")
Expand Down
12 changes: 9 additions & 3 deletions config/locales/server.en.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
en:
site_settings:
chatbot_enabled: "Enable the chatbot plugin"
chatbot_open_ai_token: "Your Open AI token. You can get one at <a target='_blank' rel='noopener' href='https://platform.openai.com/account/api-keys/'>openai.com</a>"
chatbot_azure_open_ai_model_url: "Populate it if you want to use Azure OpenAI, e.g. https://custom-domain.openai.azure.com/openai/deployments/custom-name)."
chatbot_azure_open_ai_token: "Your Azure Open AI token. Required if you supply Azure URL. You can get one at <a target='_blank' rel='noopener' href='https://portal.azure.com/'>Azure Portal</a>."
chatbot_open_ai_token: "Your Open AI token. You can get one at <a target='_blank' rel='noopener' href='https://platform.openai.com/account/api-keys/'>openai.com</a>."
chatbot_bot_type: "EXPERIMENTAL: To make the bot smarter, use 'agent' (but be aware this uses many more calls to the LLM increasing cost significantly!)"
chatbot_open_ai_model_custom: "Use Custom model name (ADVANCED USERS ONLY)"
chatbot_open_ai_model_custom_name: "(CUSTOM ONLY) Name of model"
chatbot_open_ai_model_custom_type: "(CUSTOM ONLY) IMPORTANT: Select which type of Open AI endpoint should be used, Chat (ChatGPT style), or Completion (Davinci style) - it will break if you select the wrong type"
Expand Down Expand Up @@ -32,11 +33,16 @@ en:
chatbot_request_presence_penalty: "Numeric value within the range of -200 to 200. Refer to: <a target='_blank' rel='noopener' href='https://platform.openai.com/docs/api-reference/completions/create#completions/create-presence_penalty'>API docs: presence_penalty</a>"
chatbot_max_response_tokens: "Specify the maximum amount of tokens bot will use to generate a response. Refer to: <a target='_blank' rel='noopener' href='https://openai.com/pricing'>OpenAI: Pricing</a>"
chatbot_enable_verbose_console_response_progress_logging: "Enable response retrieval progress logging to console to help debug issues"
chatbot_news_api_token: "News API token for news (if left blank, news will never be searched). <a target='_blank' rel='noopener' href='https://newsapi.org/'>Get one at NewsAPI.org</a>"
chatbot_serp_api_key: "Serp API token for google search (if left blank, google will never be searched). <a target='_blank' rel='noopener' href='https://serpapi.com/'>Get one at SerpAPI.com</a>"
chatbot_marketstack_key: "Marketstack API key for stock price information (if left blank, Marketstack will never be queried). <a target='_blank' rel='noopener' href='https://marketstack.com/'>Get one at MarketStack.com</a>"

chatbot:
bio: "Hi, Im not a real person. Im a bot that can discuss things with you. Don't take me too seriously. Sometimes, I'm even right about stuff!"
bio: "Hi, I'm not a real person. I'm a bot that can discuss things with you. Don't take me too seriously. Sometimes, I'm even right about stuff!"
prompt:
system: "You are a helpful assistant."
system:
basic: "You are a helpful assistant."
agent: "You are a helpful assistant. You have great tools in the form of functions that give you the power to get newer information. Only use the functions you have been provided with. The current date and time is %{current_date_time}. Only respond to the last question, using the prior information as context, if appropriate."
title: "The subject of this conversation is %{topic_title}"
first_post: "The first thing someone said was %{username} who said %{raw}"
post: "%{username} said %{raw}"
Expand Down
33 changes: 21 additions & 12 deletions config/settings.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,36 +67,36 @@ plugins:
choices:
- gpt-3.5-turbo
- gpt-3.5-turbo-16k
- gpt-3.5-turbo-0613
- gpt-4
- gpt-4-32k
- text-davinci-003
- text-davinci-002
- gpt-4-0613
chatbot_reply_job_time_delay:
client: false
default: 3
min: 1
chatbot_open_ai_token:
client: false
default: ''
chatbot_bot_type:
default: 'normal'
client: false
type: enum
choices:
- normal
- agent
chatbot_azure_open_ai_token:
client: false
default: ''
chatbot_open_ai_model_custom:
default: false
client: false
chatbot_azure_open_ai_model_url:
client: false
default: ''
chatbot_open_ai_model_custom:
default: false
client: false
chatbot_open_ai_model_custom_name:
default: ''
client: false
chatbot_open_ai_model_custom_type:
default: 'chat'
client: false
type: enum
choices:
- chat
- completions
chatbot_request_temperature:
client: false
default: 100
Expand Down Expand Up @@ -127,3 +127,12 @@ plugins:
chatbot_enable_verbose_console_response_progress_logging:
client: false
default: false
chatbot_news_api_token:
client: false
default: ''
chatbot_serp_api_key:
client: false
default: ''
chatbot_marketstack_key:
client: false
default: ''
173 changes: 173 additions & 0 deletions lib/discourse_chatbot/bots/open_ai_agent.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,173 @@
# frozen_string_literal: true
require "openai"

module ::DiscourseChatbot

  # EXPERIMENTAL agent-style bot (selected when SiteSetting.chatbot_bot_type
  # is "agent"). Unlike OpenAIBot it can loop: the LLM may request calls to
  # local "function" tools (calculator, Wikipedia and — when the relevant API
  # keys are configured — news, Google search and stock data); each result is
  # fed back as an internal thought until a final user-facing answer is
  # produced. A single query can therefore make several LLM calls.
  class OpenAIAgent < Bot

    # Budget of accumulated internal thoughts before a final answer is forced,
    # bounding latency and API cost (loop stops once the list exceeds this).
    MAX_INTERNAL_THOUGHTS = 5

    def initialize
      if SiteSetting.chatbot_azure_open_ai_model_url.include?("azure")
        # Azure-hosted OpenAI requires global client configuration.
        ::OpenAI.configure do |config|
          config.access_token = SiteSetting.chatbot_azure_open_ai_token
          config.uri_base = SiteSetting.chatbot_azure_open_ai_model_url
          config.api_type = :azure
          config.api_version = "2023-05-15"
        end
        @client = ::OpenAI::Client.new
      else
        @client = ::OpenAI::Client.new(access_token: SiteSetting.chatbot_open_ai_token)
      end

      @model_name =
        if SiteSetting.chatbot_open_ai_model_custom
          SiteSetting.chatbot_open_ai_model_custom_name
        else
          SiteSetting.chatbot_open_ai_model
        end

      # Calculator and Wikipedia are always available; the remaining tools are
      # only offered to the model when their API keys have been configured.
      functions = [
        ::DiscourseChatbot::CalculatorFunction.new,
        ::DiscourseChatbot::WikipediaFunction.new
      ]
      functions << ::DiscourseChatbot::NewsFunction.new if !SiteSetting.chatbot_news_api_token.blank?
      functions << ::DiscourseChatbot::GoogleSearchFunction.new if !SiteSetting.chatbot_serp_api_key.blank?
      functions << ::DiscourseChatbot::StockDataFunction.new if !SiteSetting.chatbot_marketstack_key.blank?

      @functions = parse_functions(functions)
      @func_mapping = create_func_mapping(functions)
      @chat_history = []
    end

    # Convert tool objects into the JSON-schema descriptions the OpenAI
    # function-calling API expects. Returns nil when given nil.
    def parse_functions(functions)
      return nil if functions.nil?
      functions.map { |func| ::DiscourseChatbot::Parser.func_to_json(func) }
    end

    # Build a name => tool lookup used to dispatch the model's function calls.
    def create_func_mapping(functions)
      return {} if functions.nil?
      functions.each_with_object({}) { |func, mapping| mapping[func.name] = func }
    end

    # One round-trip to the chat completions endpoint. Functions are only
    # advertised when requested and available — the final answer pass passes
    # use_functions = false so the model must reply in plain text.
    def create_chat_completion(messages, use_functions = true)
      ::DiscourseChatbot.progress_debug_message <<~EOS
        I called the LLM to help me
        ------------------------------
        value of messages is: #{messages}
        +++++++++++++++++++++++++++++++
      EOS
      parameters = { model: @model_name, messages: messages }
      parameters[:functions] = @functions if use_functions && @functions
      res = @client.chat(parameters: parameters)
      ::DiscourseChatbot.progress_debug_message <<~EOS
        +++++++++++++++++++++++++++++++++++++++
        The llm responded with
        #{res}
        +++++++++++++++++++++++++++++++++++++++
      EOS
      res
    end

    # The think/act loop: repeatedly query the LLM, executing any function
    # call it requests, until it finishes ("stop") or the thought budget is
    # spent; then make one last functions-free call for the final answer.
    def generate_response
      iteration = 1
      ::DiscourseChatbot.progress_debug_message <<~EOS
        ===============================
        # New Query
        -------------------------------
      EOS
      loop do
        ::DiscourseChatbot.progress_debug_message <<~EOS
          # Iteration: #{iteration}
          -------------------------------
        EOS
        res = create_chat_completion(@chat_history + @internal_thoughts)
        finish_reason = res["choices"][0]["finish_reason"]

        if finish_reason == 'stop' || @internal_thoughts.length > MAX_INTERNAL_THOUGHTS
          final_res = create_chat_completion(
            @chat_history + [final_thought_answer],
            false
          )
          return final_res
        elsif finish_reason == 'function_call'
          handle_function_call(res)
        else
          raise "Unexpected finish reason: #{finish_reason}"
        end
        iteration += 1
      end
    end

    # Record the model's function-call request, run the tool, and append its
    # result as an assistant message so the next iteration can build on it.
    def handle_function_call(res)
      first_message = res["choices"][0]["message"]
      @internal_thoughts << first_message.to_hash
      func_name = first_message["function_call"]["name"]
      args_str = first_message["function_call"]["arguments"]
      result = call_function(func_name, args_str)
      @internal_thoughts << { 'role' => 'assistant', 'content' => "The answer is #{result}." }
    end

    # Execute the named tool with the model-supplied JSON argument string.
    # The model can emit malformed JSON or hallucinate a tool name, so any
    # failure is logged and reported back to the model as an argument problem
    # rather than crashing the whole reply job.
    def call_function(func_name, args_str)
      ::DiscourseChatbot.progress_debug_message <<~EOS
        +++++++++++++++++++++++++++++++++++++++
        I used '#{func_name}' to help me
        +++++++++++++++++++++++++++++++++++++++
      EOS
      begin
        args = JSON.parse(args_str)
        func = @func_mapping[func_name]
        raise ArgumentError, "unknown function '#{func_name}'" if func.nil?
        func.process(args)
      rescue => e
        Rails.logger.error("OpenAIAgent: function call failed: #{e}")
        "There was something wrong with your function arguments"
      end
    end

    # Summarise the accumulated internal thoughts into a single assistant
    # message that primes the final, functions-free completion.
    def final_thought_answer
      thoughts = "To answer the question I will use these step by step instructions.\n\n"
      @internal_thoughts.each do |thought|
        if thought.key?('function_call')
          thoughts += "I will use the #{thought['function_call']['name']} function to calculate the answer with arguments #{thought['function_call']['arguments']}.\n\n"
        else
          thoughts += "#{thought['content']}\n\n"
        end
      end
      {
        'role' => 'assistant',
        'content' => "#{thoughts} Based on the above, I will now answer the question, this message will only be seen by me so answer with the assumption that the user has not seen this message."
      }
    end

    # Seed the conversation with the agent system prompt, run the agent loop,
    # and return the model's final text answer. `prompt` is the array of chat
    # messages assembled by the Bot superclass from the topic.
    def get_response(prompt)
      system_message = { "role": "system", "content": I18n.t("chatbot.prompt.system.agent", current_date_time: DateTime.current) }
      prompt.unshift(system_message)

      @internal_thoughts = []

      @chat_history += prompt

      res = generate_response

      @chat_history << res["choices"][0]["message"].to_hash
      res["choices"][0]["message"]["content"]
    end

    # Delegates to Bot#ask; kept explicit to document the public entry point.
    def ask(opts)
      super(opts)
    end
  end
end
67 changes: 21 additions & 46 deletions lib/discourse_chatbot/bots/open_ai_bot.rb
Original file line number Diff line number Diff line change
Expand Up @@ -20,56 +20,31 @@ def initialize
end

def get_response(prompt)
system_message = { "role": "system", "content": I18n.t("chatbot.prompt.system.basic") }
prompt.unshift(system_message)

model_name = SiteSetting.chatbot_open_ai_model_custom ? SiteSetting.chatbot_open_ai_model_custom_name : SiteSetting.chatbot_open_ai_model

if ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "gpt-4-32k"].include?(SiteSetting.chatbot_open_ai_model) ||
(SiteSetting.chatbot_open_ai_model_custom == true && SiteSetting.chatbot_open_ai_model_custom_type == "chat")
response = @client.chat(
parameters: {
model: model_name,
messages: prompt,
max_tokens: SiteSetting.chatbot_max_response_tokens,
temperature: SiteSetting.chatbot_request_temperature / 100.0,
top_p: SiteSetting.chatbot_request_top_p / 100.0,
frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
})

if response["error"]
begin
raise StandardError, response["error"]["message"]
rescue => e
Rails.logger.error("OpenAIBot: There was a problem: #{e}")
I18n.t('chatbot.errors.general')
end
else
response.dig("choices", 0, "message", "content")
end
elsif (SiteSetting.chatbot_open_ai_model_custom == true && SiteSetting.chatbot_open_ai_model_custom_type == "completions") ||
["text-davinci-003", "text-davinci-002"].include?(SiteSetting.chatbot_open_ai_model)

response = @client.completions(
parameters: {
model: SiteSetting.chatbot_open_ai_model,
prompt: prompt,
max_tokens: SiteSetting.chatbot_max_response_tokens,
temperature: SiteSetting.chatbot_request_temperature / 100.0,
top_p: SiteSetting.chatbot_request_top_p / 100.0,
frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
})

if response["error"]
begin
raise StandardError, response["error"]["message"]
rescue => e
Rails.logger.error("OpenAIBot: There was a problem: #{e}")
I18n.t('chatbot.errors.general')
end
else
response["choices"][0]["text"]
response = @client.chat(
parameters: {
model: model_name,
messages: prompt,
max_tokens: SiteSetting.chatbot_max_response_tokens,
temperature: SiteSetting.chatbot_request_temperature / 100.0,
top_p: SiteSetting.chatbot_request_top_p / 100.0,
frequency_penalty: SiteSetting.chatbot_request_frequency_penalty / 100.0,
presence_penalty: SiteSetting.chatbot_request_presence_penalty / 100.0
})

if response["error"]
begin
raise StandardError, response["error"]["message"]
rescue => e
Rails.logger.error("OpenAIBot: There was a problem: #{e}")
I18n.t('chatbot.errors.general')
end
else
response.dig("choices", 0, "message", "content")
end
end

Expand Down
Loading

0 comments on commit ed6e90d

Please sign in to comment.