Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/develop-ai'
Browse files Browse the repository at this point in the history
  • Loading branch information
Borikhs committed May 6, 2024
2 parents 21304b8 + 32d3af8 commit e13a1ce
Show file tree
Hide file tree
Showing 11 changed files with 51 additions and 37 deletions.
3 changes: 2 additions & 1 deletion ai/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,5 @@
*.pyc
__pycache__
data/
cap30.pem
cap30.pem
chatbot.pem
9 changes: 9 additions & 0 deletions ai/deepl_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
"""Minimal smoke test for the DeepL translation API.

Translates one hard-coded greeting and prints the result.
Requires the ``deepl`` package and network access; run manually.
"""
import os

import deepl

# SECURITY(review): this API key was committed in plain text and must be
# considered leaked — rotate it. Prefer setting DEEPL_API_KEY in the
# environment; the literal is kept only as a fallback for compatibility.
auth_key = os.getenv('DEEPL_API_KEY', 'd31a50f4-1d48-4f54-bbad-c6c63cdcffb3:fx')
translator = deepl.Translator(auth_key)

message = '안녕하세요. 맛있는 점심 드세요~'
# target_lang must be passed by keyword; 'KO' requests Korean output.
result = translator.translate_text(message, target_lang='KO')

print(result.text)
Binary file modified ai/llm/__pycache__/llm_rag.cpython-312.pyc
Binary file not shown.
Binary file modified ai/llm/__pycache__/llm_rag.cpython-39.pyc
Binary file not shown.
15 changes: 15 additions & 0 deletions ai/llm/deepl_translator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import os

import deepl

class DeeplTranslator:
    """Thin wrapper around ``deepl.Translator`` that translates text into Korean."""

    def __init__(self):
        # SECURITY(review): this key was committed in plain text and must be
        # considered leaked — rotate it. Prefer the DEEPL_API_KEY environment
        # variable; the literal remains only as a backward-compatible fallback.
        API_KEY = os.getenv('DEEPL_API_KEY', 'd31a50f4-1d48-4f54-bbad-c6c63cdcffb3:fx')
        self.translator = deepl.Translator(API_KEY)

    def translate(self, message):
        """Translate *message* into Korean and return the translated text.

        Bug fix: ``deepl.Translator.translate_text`` takes ``target_lang`` as a
        keyword-only argument, so the previous positional call raised TypeError.
        """
        result = self.translator.translate_text(message, target_lang='KO')
        return result.text

    # Deprecated alias kept so any existing callers of the old name still work.
    fuck = translate


if __name__ == '__main__':
    # Manual smoke test: translate a sample sentence and show the output.
    # Bug fix: the previous call ``dl.fuck()`` omitted the required message
    # argument and raised TypeError before any request was made.
    dl = DeeplTranslator()
    print(dl.fuck('Hello, have a nice lunch!'))
35 changes: 17 additions & 18 deletions ai/llm/llm_rag.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,9 @@
from langchain_core.runnables import RunnablePassthrough, RunnableLambda
from langchain_openai import ChatOpenAI
from tavily import TavilyClient
from llm.prompt import casual_prompt, is_qna_prompt, combine_result_prompt, score_prompt, translate_prompt
from llm.prompt import casual_prompt, is_qna_prompt, combine_result_prompt, score_prompt
from langchain.retrievers.multi_query import MultiQueryRetriever
import deepl
import os

class LLM_RAG:
Expand All @@ -18,7 +19,9 @@ def __init__(self, trace = False):
self.is_qna_prompt = is_qna_prompt()
self.combine_result_prompt = combine_result_prompt()
self.score_prompt = score_prompt()
self.translate_prompt = translate_prompt()
self.deepl = deepl.Translator(os.getenv("DEEPL_API_KEY"))
self.ko_query = None
self.result_lang = None
self.notice_retriever = None
self.school_retriever = None
self.notice_multiquery_retriever = None
Expand Down Expand Up @@ -94,41 +97,37 @@ def set_chain(self):
self.score_route
)

self.translate_chain = (
self.translate_prompt
| self.llm
| StrOutputParser()
)

def qna_route(self, info):
if "question" in info["topic"].lower():

self.result = self.rag_combine_chain.invoke(info["question"])
score = self.score_chain.invoke({"question" : self.question, "answer": self.result})
self.score_invoke_chain.invoke({"score" : score, "question": self.question})
self.result = self.rag_combine_chain.invoke(self.ko_query)
score = self.score_chain.invoke({"question" : self.ko_query, "answer": self.result})
self.score_invoke_chain.invoke({"score" : score, "question": self.ko_query})

elif "casual" in info["topic"].lower():
self.result = self.casual_answer_chain.invoke(info['question'])
self.result = self.casual_answer_chain.invoke(self.question)

else:
self.result = self.rag_combine_chain.invoke(info["question"])
self.result = self.rag_combine_chain.invoke(self.question)


def score_route(self, info):
if "good" in info["score"].lower():
self.result = self.deepl.translate_text(self.result, target_lang=self.result_lang).text
return self.result
else:
print('-- google search --')
content = self.tavily.qna_search(query='국민대학교 ' + self.question)
self.result = "답을 찾을 수 없어서 구글에 검색했습니다.\n\n"
self.result += self.translate_chain.invoke({'content' : content, 'question':self.question})
return self.result
content = self.tavily.qna_search(query='국민대학교 ' + self.ko_query)
self.result = "I couldn't find the answer, so I searched on Google.\n\n" + content
self.result = self.deepl.translate_text(self.result, target_lang=self.result_lang).text

def format_docs(self, docs):
# 검색한 문서 결과를 하나의 문단으로 합쳐줍니다.
return "\n\n".join(doc.page_content + '\nmetadata=' + str(doc.metadata) for doc in docs)

def query(self, question):
def query(self, question, result_lang):
self.question = question
self.ko_query = self.deepl.translate_text(self.question, target_lang='ko').text
self.result_lang = result_lang
self.qna_route_chain.invoke(question)
return self.result
9 changes: 0 additions & 9 deletions ai/llm/prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,13 +39,4 @@ def score_prompt():
Classification:"""
)
return prompt

def translate_prompt():
prompt = PromptTemplate.from_template("""
You are a translator with vast knowledge of human languages. Translate the content into the language corresponding to the question. You should only translate and never answer questions.
question : {question}
content : {content}
result :""")
return prompt
3 changes: 2 additions & 1 deletion ai/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@ langchainhub==0.1.15
beautifulsoup4==4.12.3
pypdf==4.1.0
python-dotenv==1.0.1
langchain-text-splitters==0.0.1
langchain-text-splitters==0.0.1
deepl
3 changes: 2 additions & 1 deletion ai/run_chatbot.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
load_dotenv()

os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY')
os.environ['DEEPL_API_KEY'] = os.getenv('DEEPL_API_KEY')

# LangSmith 사용시 아래 주석을 해제
# os.environ['LANGCHAIN_API_KEY'] = os.getenv('LANGCHAIN_API_KEY')
Expand Down Expand Up @@ -40,5 +41,5 @@
if q == str(0):
break
print('AI : ', end='')
print(llm.query(q))
print(llm.query(q, 'ZH'))
print()
11 changes: 4 additions & 7 deletions ai/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@

class Query(BaseModel):
query: str
target_lang: str

@asynccontextmanager
async def lifespan(app:FastAPI):
Expand Down Expand Up @@ -58,18 +59,14 @@ async def lifespan(app:FastAPI):
async def initiate():
return "안녕하세요! 국민대학교 전용 챗봇 KUKU입니다. 국민대학교에 대한 건 모든 질문해주세요!"

@app.post("/query")
@app.post("/api/chatbot")
async def query(query: Query):
return {'code': '200',
return {'success': 'True',
'message': 'success',
'response': {
'answer': llm.query(query.query)
'answer': llm.query(query.query, query.target_lang)
}}

@app.post("/input")
async def input(data: UploadFile):
vdb.add_content(data.file)
return


if __name__ == "__main__":
Expand Down
Binary file modified ai/vectordb/__pycache__/vector_db.cpython-39.pyc
Binary file not shown.

0 comments on commit e13a1ce

Please sign in to comment.