Skip to content

Commit

Permalink
better chat interface, added answer generation bot and fixed previously mysteriously deleted files
Browse files Browse the repository at this point in the history
  • Loading branch information
SubhadityaMukherjee committed Aug 23, 2024
1 parent ccf0e63 commit d8a19ab
Show file tree
Hide file tree
Showing 14 changed files with 56 additions and 2,559 deletions.
2 changes: 1 addition & 1 deletion backend/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
"search_type" : "similarity",
"reranking" : false,
"long_context_reorder" : false,
"structured_query": true,
"structured_query": false,
"use_chroma_for_saving_metadata": false,
"chunk_size": 1000,
"chroma_metadata_dir": "../data/chroma_db_metadata"
Expand Down
4 changes: 4 additions & 0 deletions frontend/paths.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,9 @@
"structured_query": {
"docker" : "http://fastapi:8081/structuredquery/",
"local" : "http://0.0.0.0:8081/structuredquery/"
},
"documentation_query": {
"docker" : "http://fastapi:8083/documentationquery/",
"local" : "http://0.0.0.0:8083/documentationquery/"
}
}
8 changes: 6 additions & 2 deletions frontend/ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,17 @@
config_path = Path("../backend/config.json")
ui_loader = UILoader(config_path)

query_type = st.selectbox("Select Query Type", ["Dataset", "Flow"])
llm_filter = st.toggle("LLM Filter")
# Chat input box
user_input = ui_loader.chat_entry()

ui_loader.create_chat_interface(None)
query_type = st.selectbox("Select Query Type", ["General Query","Dataset", "Flow"], key="query_type_2")
llm_filter = st.toggle("LLM Filter")
# Chat interface
if user_input:
ui_loader.create_chat_interface(
user_input, query_type=query_type, llm_filter=llm_filter
)
ui_loader.query_type = st.selectbox("Select Query Type", ["General Query","Dataset", "Flow"], key="query_type_3")
ui_loader.llm_filter = st.toggle("LLM Filter", key="llm_filter_2")

41 changes: 34 additions & 7 deletions frontend/ui_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,11 @@ def display_results(initial_response):
"""
Description: Display the results in a DataFrame
"""
st.write("OpenML Agent: ")
st.dataframe(initial_response)

# st.write("OpenML Agent: ")
try:
st.dataframe(initial_response)
except:
st.write(initial_response)

class LLMResponseParser:
"""
Expand Down Expand Up @@ -145,6 +147,22 @@ def fetch_llm_response(self, query: str):
f"{llm_response_path['local']}{query}"
).json()
return self.llm_response

def fetch_documentation_query(self, query: str):
    """
    Description: Fetch the response for a general / documentation / code query
    from the documentation LLM service as a JSON dict.

    Tries the docker-internal endpoint first and falls back to the local
    endpoint when the docker host is unreachable. The parsed JSON is stored
    on ``self.documentation_response`` and also returned (mirroring
    ``fetch_llm_response``).
    """
    documentation_response_path = self.paths["documentation_query"]
    # URL-encode the free-text query so characters like '?', '#', '/' and
    # spaces cannot break or reroute the request path.
    encoded_query = urllib.parse.quote(query, safe="")
    try:
        # Docker-internal hostname; only resolvable inside the compose network.
        # NOTE(review): the json= body on a GET is unusual -- kept for
        # backward compatibility; confirm the service actually reads it.
        self.documentation_response = requests.get(
            f"{documentation_response_path['docker']}{encoded_query}",
            json={"query": query},
        ).json()
    except requests.exceptions.RequestException:
        # Not running inside docker -- retry against the local endpoint.
        # Catch only network/HTTP errors; a JSON decode bug should surface.
        self.documentation_response = requests.get(
            f"{documentation_response_path['local']}{encoded_query}",
            json={"query": query},
        ).json()
    return self.documentation_response

def fetch_structured_query(self, query_type: str, query: str):
"""
Expand Down Expand Up @@ -369,13 +387,17 @@ def chat_entry(self):
self.chatbot_display, max_chars=self.chatbot_input_max_chars
)

def create_chat_interface(self, user_input, query_type, llm_filter):
def create_chat_interface(self, user_input, query_type=None, llm_filter=None):
"""
Description: Create the chat interface and display the chat history and results. Show the user input and the response from the OpenML Agent.
"""
self.query_type = query_type
self.llm_filter = llm_filter
if user_input is None:
with st.chat_message(name = "ai"):
st.write("OpenML Agent: ", "Hello! How can I help you today?")

# Handle user input
if user_input:
st.session_state.messages.append({"role": "user", "content": user_input})
Expand All @@ -389,9 +411,11 @@ def create_chat_interface(self, user_input, query_type, llm_filter):
# Display chat history
for message in st.session_state.messages:
if message["role"] == "user":
st.write(f"You: {message['content']}")
with st.chat_message(name = "user"):
display_results(message["content"])
else:
display_results(message["content"])
with st.chat_message(name = "ai"):
display_results(message["content"])

# Function to handle query processing
def process_query_chat(self, query):
Expand All @@ -404,7 +428,7 @@ def process_query_chat(self, query):
self.query_type, apply_llm_before_rag=apply_llm_before_rag
)

if self.query_type == "Dataset":
if self.query_type == "Dataset" or self.query_type == "Flow":
if config["structured_query"]:
# get structured query
response_parser.fetch_structured_query(self.query_type, query)
Expand Down Expand Up @@ -439,3 +463,6 @@ def process_query_chat(self, query):

results = response_parser.parse_and_update_response(self.data_metadata)
return results
elif self.query_type == "General Query":
response_parser.fetch_documentation_query(query)
return response_parser.documentation_response
1 change: 0 additions & 1 deletion openml_information_bot/README.md

This file was deleted.

Empty file removed openml_information_bot/__init__.py
Empty file.
Empty file.
26 changes: 0 additions & 26 deletions openml_information_bot/main.py

This file was deleted.

202 changes: 0 additions & 202 deletions openml_information_bot/utils.py

This file was deleted.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ urllib3 = "1.26.19"
uvicorn = "0.29.0"
poetry = "1.8.3"
protobuf = "3.20.0"
langchain-ollama = "0.1.1"


[build-system]
Expand Down
4 changes: 4 additions & 0 deletions start_local.sh
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,10 @@ else
echo $! > $PID_FILE
fi

cd ../documentation_bot || exit
uvicorn documentation_query:app --host 0.0.0.0 --port 8083 &
echo $! >> $PID_FILE

cd ../backend || exit
uvicorn backend:app --host 0.0.0.0 --port 8000 &
echo $! >> $PID_FILE
Expand Down
3 changes: 3 additions & 0 deletions stop_local.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ cd ../frontend
# streamlit run ui.py &
kill -9 $(cat $PID_FILE)

cd ../documentation_bot
kill -9 $(cat $PID_FILE)

cd ..

killall ollama
Expand Down
Loading

0 comments on commit d8a19ab

Please sign in to comment.