diff --git a/.azure-template.env b/.azure-template.env index ec3f9bc..6414bed 100644 --- a/.azure-template.env +++ b/.azure-template.env @@ -6,7 +6,7 @@ EMBEDDINGS_DEPLOYMENT_NAME=embedding RABBITMQ_HOST=localhost RABBITMQ_USER=admin RABBITMQ_PASSWORD=super-secure-pass -AI_MODEL_TEMPERATURE=0.3 +AI_MODEL_TEMPERATURE=0.4 AI_SOURCE_WEBSITE=https://www.alkemio.org AI_SOURCE_WEBSITE2=https://welcome.alkem.io AI_LOCAL_PATH=~/alkemio/data diff --git a/Dockerfile b/Dockerfile index 26cd6f7..4787d4e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,18 +1,19 @@ # Use an official Python runtime as a parent image -FROM python:3.11-slim-bookworm +ARG PYTHON_VERSION=3.11 +FROM python:${PYTHON_VERSION}-slim-bullseye as builder # Set the working directory in the container to /app WORKDIR /app -ARG GO_VERSION=1.21.5 -ARG HUGO_VERSION=0.121.1 -ARG ARCHITECTURE=amd64 +ARG GO_VERSION=1.21.6 +ARG HUGO_VERSION=0.121.2 +ARG TARGETARCH # install git, go and hugo RUN apt update && apt upgrade -y && apt install -y git wget -RUN wget https://go.dev/dl/go${GO_VERSION}.linux-${ARCHITECTURE}.tar.gz && tar -C /usr/local -xzf go${GO_VERSION}.linux-${ARCHITECTURE}.tar.gz +RUN wget https://go.dev/dl/go${GO_VERSION}.linux-${TARGETARCH}.tar.gz && tar -C /usr/local -xzf go${GO_VERSION}.linux-${TARGETARCH}.tar.gz RUN export PATH=$PATH:/usr/local/go/bin:/usr/local && go version -RUN wget https://github.com/gohugoio/hugo/releases/download/v${HUGO_VERSION}/hugo_extended_${HUGO_VERSION}_linux-${ARCHITECTURE}.tar.gz && tar -C /usr/local -xzf hugo_extended_${HUGO_VERSION}_linux-${ARCHITECTURE}.tar.gz && ls -al /usr/local +RUN wget https://github.com/gohugoio/hugo/releases/download/v${HUGO_VERSION}/hugo_extended_${HUGO_VERSION}_linux-${TARGETARCH}.tar.gz && tar -C /usr/local -xzf hugo_extended_${HUGO_VERSION}_linux-${TARGETARCH}.tar.gz && ls -al /usr/local RUN /usr/local/hugo version # Install Poetry @@ -24,5 +25,22 @@ COPY . 
/app # Use Poetry to install dependencies RUN poetry config virtualenvs.create true && poetry install --no-interaction --no-ansi -# Run guidance-engine.py when the container launches -CMD ["poetry", "run", "python", "guidance_engine.py"] +# Start a new, final stage +FROM python:${PYTHON_VERSION}-slim-bullseye + +WORKDIR /app + +# Install git and Poetry in the final stage +RUN apt update && apt install -y git && pip install poetry + +# Copy the compiled app, Hugo executable, Go, and the virtual environment from the previous stage +COPY --from=builder /app /app +COPY --from=builder /usr/local/hugo /usr/local/hugo +COPY --from=builder /usr/local/go /usr/local/go +COPY --from=builder /root/.cache/pypoetry/virtualenvs /root/.cache/pypoetry/virtualenvs + +# Add Hugo and Go to the PATH +ENV PATH="/usr/local/hugo:/usr/local/go/bin:${PATH}" + +# Run guidance_engine.py when the container launches +CMD ["poetry", "run", "python", "guidance_engine.py"] \ No newline at end of file diff --git a/ai_adapter.py b/ai_adapter.py index 8e5a766..f786fc0 100644 --- a/ai_adapter.py +++ b/ai_adapter.py @@ -1,9 +1,9 @@ -from langchain.embeddings import AzureOpenAIEmbeddings +from langchain_openai import AzureOpenAIEmbeddings from langchain.vectorstores import FAISS -from langchain.llms import AzureOpenAI +from langchain_openai import AzureOpenAI from langchain.prompts.prompt import PromptTemplate -from langchain.prompts import ChatPromptTemplate -from langchain.chat_models import AzureChatOpenAI +from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder, PromptTemplate +from langchain_openai import AzureChatOpenAI from langchain.schema import StrOutputParser from langchain_core.runnables import RunnableLambda, RunnablePassthrough from langchain.schema import format_document @@ -27,7 +27,7 @@ # Create handlers c_handler = logging.StreamHandler(io.TextIOWrapper(sys.stdout.buffer, line_buffering=True)) -f_handler = logging.FileHandler(os.path.join(os.path.expanduser(local_path),'app.log')) +f_handler = logging.FileHandler(os.path.join(os.path.expanduser(local_path), 'app.log')) c_handler.setLevel(level=getattr(logging, LOG_LEVEL)) f_handler.setLevel(logging.WARNING) @@ -45,7 +45,7 @@ logger.info(f"log level {os.path.basename(__file__)}: {LOG_LEVEL}") # verbose output for LLMs -if LOG_LEVEL=="DEBUG": +if LOG_LEVEL == "DEBUG": verbose_models = True else: verbose_models = False @@ -75,40 +75,56 @@ def get_language_by_code(language_code): return language_mapping.get(language_code, 'English') -chat_template = """ -You are a friendly conversational agent. Use the following step-by-step instructions to respond to user inputs. -1 - The text provided in the context delimited by triple pluses may contain questions. Remove those questions from the context. -2 - Provide an up to three paragraghs answer that is accurate and exthausive, taking into account the context delimited by triple pluses. - If the answer cannot be found within the context, write 'I could not find an answer to your question'. -3 - Only return the answer from step 2, do not show any code or additional information. -4 - If the question is in a different language than English, translate the question to English before answering. -5 - Answer the question in the {language} language. +chat_system_template = """ +You are a friendly and talkative conversational agent, tasked with answering questions about Alkemio. 
+Use the following step-by-step instructions to respond to user inputs: + +1 - If the question is in a different language than English, translate the question to English before answering. +2 - The text provided in the context delimited by triple pluses is retrieved from the Alkemio website and is not part of the conversation with the user. +3 - Provide an answer of 250 words or less that is professional, engaging, accurate and exhaustive, based on the context delimited by triple pluses. \ +If the answer cannot be found within the context, write 'Hmm, I am not sure'. +4 - Only return the answer from step 3, do not show any code or additional information. +5 - Answer the question in the {language} language. +++ -Context: +context: {context} +++ -Question: {question} """ condense_question_template = """" -Combine the chat history delimited by triple pluses and follow-up question into a single standalone question that does justice to the follow-up question. Do only return the standalone question, do not return any other information. +Create a single sentence standalone query based on the human input, using the following step-by-step instructions: + +1. If the human input is expressing a sentiment, delete and ignore the chat history delimited by triple pluses. \ +Then, return the human input containing the sentiment as the standalone query. Do NOT in any way respond to the human input, \ +simply repeat it. +2. Otherwise, combine the chat history delimited by triple pluses and human input into a single standalone query that does \ +justice to the human input. +3. Do only return the standalone query, do not return any other information. Never return the chat history delimited by triple pluses. +++ -Chat History: {chat_history} +chat history: +{chat_history} +++ -Follow-up question: {question} + +Human input: {question} --- -Standalone question: +Standalone query: """ condense_question_prompt = PromptTemplate.from_template(condense_question_template) -chat_prompt = ChatPromptTemplate.from_template(chat_template) +chat_prompt = ChatPromptTemplate.from_messages( + [ + ("system", chat_system_template), + MessagesPlaceholder(variable_name="chat_history"), + ("human", "{question}"), + ] +) generic_llm = AzureOpenAI(azure_deployment=os.environ["LLM_DEPLOYMENT_NAME"], - temperature=0, verbose=verbose_models) + temperature=0, verbose=verbose_models) embeddings = AzureOpenAIEmbeddings( azure_deployment=config['embeddings_deployment_name'], @@ -121,7 +137,7 @@ def load_vector_db(): Purpose: Load the data into the vector database. 
Args: - + Returns: vectorstore: the vectorstore object """ @@ -130,25 +146,35 @@ def load_vector_db(): logger.info(f"The file vector database is present") else: logger.info(f"The file vector database is not present, ingesting") - def_ingest.ingest(config['source_website'], config['website_repo'], website_generated_path, website_source_path, config['source_website2'], config['website_repo2'], website_generated_path2, website_source_path2) + def_ingest.ingest( + config['source_website'], + config['website_repo'], + website_generated_path, + website_source_path, + config['source_website2'], + config['website_repo2'], + website_generated_path2, + website_source_path2) return FAISS.load_local(vectordb_path, embeddings) + vectorstore = load_vector_db() retriever = vectorstore.as_retriever(search_type="similarity_score_threshold", search_kwargs={"score_threshold": .5}) chat_llm = AzureChatOpenAI(azure_deployment=os.environ["LLM_DEPLOYMENT_NAME"], - temperature=os.environ["AI_MODEL_TEMPERATURE"], - max_tokens=max_token_limit, verbose=verbose_models) + temperature=os.environ["AI_MODEL_TEMPERATURE"], + max_tokens=max_token_limit, verbose=verbose_models) condense_llm = AzureChatOpenAI(azure_deployment=os.environ["LLM_DEPLOYMENT_NAME"], - temperature=0, - verbose=verbose_models) + temperature=0, + verbose=verbose_models) def format_docs(docs): return "\n\n".join(doc.page_content for doc in docs) + DEFAULT_DOCUMENT_PROMPT = PromptTemplate.from_template(template="{page_content}") def _combine_documents( @@ -157,11 +183,10 @@ def _combine_documents( doc_strings = [format_document(doc, document_prompt) for doc in docs] return document_separator.join(doc_strings) - async def query_chain(question, language, chat_history): # check whether the chat history is empty - if chat_history.buffer == []: + if chat_history.buffer == []: first_call = True else: first_call = False @@ -176,11 +201,12 @@ async def query_chain(question, language, chat_history): # This adds a "memory" key to the input object loaded_memory = RunnablePassthrough.assign( chat_history=RunnableLambda(chat_history.load_memory_variables) | itemgetter("history"), - ) + ) logger.debug(f"loaded memory {loaded_memory}\n") logger.debug(f"chat history {chat_history}\n") + # Now we calculate the standalone question if the chat_history is not empty standalone_question = { "standalone_question": { @@ -208,10 +234,10 @@ async def query_chain(question, language, chat_history): "question": lambda x: x["standalone_question"], } - # Now we construct the inputs for the final prompt final_inputs = { "context": lambda x: _combine_documents(x["docs"]), + "chat_history" : lambda x: chat_history.buffer, "question": itemgetter("question"), "language": lambda x: language['language'], } @@ -222,7 +248,6 @@ async def query_chain(question, language, chat_history): "docs": itemgetter("docs"), } - # And now we put it all together in a 'RunnableBranch', so we only invoke the rephrasing part when the chat history is not empty final_chain = RunnableBranch( (lambda x: x["first_call"], loaded_memory | direct_question | retrieved_documents | answer), diff --git a/def_ingest.py b/def_ingest.py index b5fbdb2..0cefbf2 100644 --- a/def_ingest.py +++ b/def_ingest.py @@ -3,7 +3,7 @@ import sys import io import re -from langchain.embeddings import AzureOpenAIEmbeddings +from langchain_openai import AzureOpenAIEmbeddings from langchain.text_splitter import RecursiveCharacterTextSplitter from langchain.vectorstores import FAISS from langchain.callbacks import get_openai_callback @@ -26,7 
+26,7 @@ # Create handlers c_handler = logging.StreamHandler(io.TextIOWrapper(sys.stdout.buffer, line_buffering=True)) -f_handler = logging.FileHandler(os.path.join(os.path.expanduser(local_path),'app.log')) +f_handler = logging.FileHandler(os.path.join(os.path.expanduser(local_path), 'app.log')) c_handler.setLevel(level=getattr(logging, LOG_LEVEL)) f_handler.setLevel(logging.WARNING) @@ -102,13 +102,13 @@ def read_and_parse_html(local_source_path, source_website_url, website_generated """ # Transform bs_transformer = BeautifulSoupTransformer() - + # Get all links from the sitemaps logger.info(f"generating html: {local_source_path}, {source_website_url}") full_sitemap_list = extract_urls_from_sitemap(website_generated_path) - exclusion_list = [os.sep + 'tag' + os.sep, - os.sep + 'category' + os.sep, + exclusion_list = [os.sep + 'tag' + os.sep, + os.sep + 'category' + os.sep, os.sep + 'help' + os.sep + 'index'] data = [] @@ -126,7 +126,10 @@ def read_and_parse_html(local_source_path, source_website_url, website_generated continue document = loader.load() # note h5 and h6 tags for our website contain a lot of irrelevant metadata - doc_transformed = bs_transformer.transform_documents(document, tags_to_extract=["p", "article", "title", "h1"], unwanted_tags=["h5", "h6"], remove_lines=True) + doc_transformed = bs_transformer.transform_documents( + document, tags_to_extract=[ + "p", "article", "title", "h1"], unwanted_tags=[ + "h5", "h6"], remove_lines=True) body_text = doc_transformed[0] # first remove duplicate spaces, then remove duplicate '\n\n', then remove duplicate '\n \n ' @@ -139,11 +142,11 @@ def read_and_parse_html(local_source_path, source_website_url, website_generated data.append(body_text) included_files.append(file_name) else: - #logger.info(f"document too small, not adding: {body_text.page_content}\n") + # logger.info(f"document too small, not adding: {body_text.page_content}\n") excluded_files.append(file_name) except Exception as e: - # logger.error(f"...unable to process file: {str(e)}") - error_files.append(file_name) + # logger.error(f"...unable to process file: {str(e)}") + error_files.append(file_name) logger.info(f"==> Returning {len(included_files)} files; {len(error_files)} gave errors + {len(excluded_files)} files were skipped") text_splitter = RecursiveCharacterTextSplitter(chunk_size=chunk_size, chunk_overlap=chunk_size/5) @@ -169,7 +172,7 @@ def clone_and_generate(website_repo, destination_path, source_path): destination_path: path to directory containing generated html files source_path: path to directory containing the checked out github repo Returns: - + """ logger.info(f"About to generate website: {website_repo}") @@ -179,7 +182,7 @@ def clone_and_generate(website_repo, destination_path, source_path): os.chdir(source_path) git_switch_command = ['git', 'switch', branch] git_directory = os.path.join(source_path, '.git') - # Check if the repository already exists in the source_path + # Check if the repository already exists in the source_path if os.path.exists(git_directory): # Repository exists, perform a git pull to update it logger.info(f"...git directory exists, pulling in {os.getcwd()}") @@ -191,7 +194,7 @@ def clone_and_generate(website_repo, destination_path, source_path): if (result_switch.returncode != 0): logger.error(f"Unable to switch {website_repo} repository: {result_switch.stderr}") else: - logger.info(f"...git directory does not exist, cloning in {os.getcwd()}")# Repository doesn't exist, perform a git clone + logger.info(f"...git directory does not 
exist, cloning in {os.getcwd()}") # Repository doesn't exist, perform a git clone clone_command = ['git', 'clone', "https://" + github_user + ":" + github_pat + "@" + website_repo, source_path] result_clone = subprocess.run(clone_command, capture_output=True, text=True) if (result_clone.returncode != 0): @@ -255,5 +258,5 @@ def create_vector_db(source_website_url, source_website_url2) -> None: # only execute if this is the main program run (so not imported) if __name__ == "__main__": - ingest(config['source_website'], config['website_repo'], website_generated_path, website_source_path, - config['source_website2'], config['website_repo2'], website_generated_path2, website_source_path2) + ingest(config['source_website'], config['website_repo'], website_generated_path, website_source_path, + config['source_website2'], config['website_repo2'], website_generated_path2, website_source_path2) diff --git a/guidance_engine.py b/guidance_engine.py index 1aa2b66..13c7597 100644 --- a/guidance_engine.py +++ b/guidance_engine.py @@ -1,6 +1,6 @@ from langchain.callbacks import get_openai_callback -from langchain.memory import ConversationBufferMemory -#import pika +from langchain.memory import ConversationBufferWindowMemory +# import pika import json import ai_adapter import logging @@ -10,6 +10,7 @@ import os import def_ingest import aio_pika +import aiormq from aio_pika import connect, RobustConnection, ExchangeType from config import config, website_source_path, website_generated_path, website_source_path2, website_generated_path2, vectordb_path, local_path, LOG_LEVEL @@ -21,7 +22,7 @@ # Create handlers c_handler = logging.StreamHandler(io.TextIOWrapper(sys.stdout.buffer, line_buffering=True)) -f_handler = logging.FileHandler(os.path.join(os.path.expanduser(local_path),'app.log')) +f_handler = logging.FileHandler(os.path.join(os.path.expanduser(local_path), 'app.log')) c_handler.setLevel(level=getattr(logging, LOG_LEVEL)) f_handler.setLevel(logging.WARNING) @@ -66,6 +67,7 @@ async def connect(self): self.channel = await self.connection.channel() await self.channel.declare_queue(self.queue, auto_delete=False) + rabbitmq = RabbitMQ( host=config['rabbitmq_host'], login=config['rabbitmq_user'], @@ -79,23 +81,20 @@ async def query(user_id, query, language_code): if user_id not in user_data: user_data[user_id] = {} - user_data[user_id]['chat_history'] = ConversationBufferMemory(return_messages=True, output_key="answer", input_key="question") - #user_chain[user_id]=ai_utils.setup_chain() + user_data[user_id]['chat_history'] = ConversationBufferWindowMemory(k=3, return_messages=True, output_key="answer", input_key="question") + # user_chain[user_id]=ai_utils.setup_chain() reset(user_id) - #chat_history=[] + # chat_history=[] user_data[user_id]['language'] = ai_adapter.get_language_by_code(language_code) logger.debug(f"\nlanguage: {user_data[user_id]['language']}\n") - #chat_history = user_data[user_id]['chat_history'] - - + # chat_history = user_data[user_id]['chat_history'] with get_openai_callback() as cb: llm_result = await ai_adapter.query_chain({"question": query}, {"language": user_data[user_id]['language']}, user_data[user_id]['chat_history']) answer = llm_result['answer'] - # clean up the document sources to avoid sending too much information over. 
sources = [doc.metadata['source'] for doc in llm_result['source_documents']] logger.debug(f"\n\nsources: {sources}\n\n") @@ -107,14 +106,12 @@ async def query(user_id, query, language_code): logger.debug(f"\n\nLLM result: {llm_result}\n\n") logger.info(f"\n\nanswer: {answer}\n\n") - logger.debug(f"\n\nsources: {sources}\n\ n") + logger.debug(f"\n\nsources: {sources}\n\\ n") user_data[user_id]['chat_history'].save_context({"question": query}, {"answer": answer.content}) logger.debug(f"new chat history {user_data[user_id]['chat_history']}\n") - response = json.dumps({ - "question": query, "answer": str(answer), "sources": sources, "prompt_tokens": cb.prompt_tokens, "completion_tokens": cb.completion_tokens, "total_tokens": cb.total_tokens, "total_cost": cb.total_cost - } - ) + response = json.dumps({"question": query, "answer": str(answer.content), "sources": sources, "prompt_tokens": cb.prompt_tokens, + "completion_tokens": cb.completion_tokens, "total_tokens": cb.total_tokens, "total_cost": cb.total_cost}) return response @@ -131,7 +128,6 @@ async def ingest(source_url, website_repo, destination_path, source_path, source return "Ingest function executed" - async def on_request(message: aio_pika.IncomingMessage): async with message.process(): # Parse the message body as JSON @@ -140,7 +136,7 @@ async def on_request(message: aio_pika.IncomingMessage): # Get the user ID from the message body user_id = body['data']['userId'] - logger.info(f"\nrequest arriving for user id: {user_id}, deciding what to do\n\n") + logger.info(f"\nrequest arriving for user id: {user_id}, deciding what to do\n\n") # If there's no lock for this user, create one if user_id not in user_locks: @@ -148,9 +144,9 @@ async def on_request(message: aio_pika.IncomingMessage): # Check if the lock is locked if user_locks[user_id].locked(): - logger.info(f"existing task running for user id: {user_id}, waiting for it to finish first\n\n") + logger.info(f"existing task running for user id: {user_id}, waiting for it to finish first\n\n") else: - logger.info(f"no task running for user id: {user_id}, let's move!\n\n") + logger.info(f"no task running for user id: {user_id}, let's move!\n\n") # Acquire the lock for this user async with user_locks[user_id]: @@ -181,7 +177,7 @@ async def process_message(message: aio_pika.IncomingMessage): else: if operation == 'query': if ('question' in body['data']) and ('language' in body['data']): - logger.info(f"query time for user id: {user_id}, let's call the query() function!\n\n") + logger.info(f"query time for user id: {user_id}, let's call the query() function!\n\n") response = await query(user_id, body['data']['question'], body['data']['language']) else: response = "Query parameter(s) not provided" @@ -190,18 +186,25 @@ async def process_message(message: aio_pika.IncomingMessage): else: response = "Unknown function" - await rabbitmq.channel.default_exchange.publish( - aio_pika.Message( - body=json.dumps({"operation": "feedback", "result": response}).encode(), - correlation_id=message.correlation_id, - reply_to=message.reply_to - ), - routing_key=message.reply_to - ) + try: + if rabbitmq.connection.is_closed or rabbitmq.channel.is_closed: + logger.error("Connection or channel is not open. 
Cannot publish message.") + return + + await rabbitmq.channel.default_exchange.publish( + aio_pika.Message( + body=json.dumps({"operation": "feedback", "result": response}).encode(), + correlation_id=message.correlation_id, + reply_to=message.reply_to + ), + routing_key=message.reply_to + ) + logger.info(f"Response sent for correlation_id: {message.correlation_id}") + logger.info(f"Response sent to: {message.reply_to}") + logger.debug(f"response: {response}") + except (aio_pika.exceptions.AMQPError, asyncio.exceptions.CancelledError, aiormq.exceptions.ChannelInvalidStateError) as e: + logger.error(f"Failed to publish message due to a RabbitMQ error: {e}") - logger.info(f"Response sent for correlation_id: {message.correlation_id}") - logger.info(f"Response sent to: {message.reply_to}") - logger.debug(f"response: {response}") async def main(): logger.info(f"main fucntion (re)starting\n") @@ -223,4 +226,3 @@ async def main(): loop = asyncio.get_event_loop() loop.run_until_complete(main()) - diff --git a/poetry.lock b/poetry.lock index e53533a..88e3a99 100644 --- a/poetry.lock +++ b/poetry.lock @@ -705,19 +705,19 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] name = "langchain-community" -version = "0.0.8" +version = "0.0.11" description = "Community contributed LangChain integrations." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_community-0.0.8-py3-none-any.whl", hash = "sha256:cf8ac1c5dcf886e9c0e20842a8774e7753e9f87c83f0734f50c54cc3cc513eea"}, - {file = "langchain_community-0.0.8.tar.gz", hash = "sha256:d11207f9be3020a82f46927f47f5a75b29ed3ad29240bd3e8b7e862c41d69274"}, + {file = "langchain_community-0.0.11-py3-none-any.whl", hash = "sha256:30ab1d7dbf35d0ebe684d8a1e8964e8dedd3d31a3703790436b39674cfa06f41"}, + {file = "langchain_community-0.0.11.tar.gz", hash = "sha256:eaeaa8d63427ecf0cb32fe2f1ba4d05ad6d5ef9f7019baf21dc2dde5b1403002"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" -langchain-core = ">=0.1.5,<0.2" +langchain-core = ">=0.1.8,<0.2" langsmith = ">=0.0.63,<0.1.0" numpy = ">=1,<2" PyYAML = ">=5.3" @@ -731,13 +731,13 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15. 
[[package]] name = "langchain-core" -version = "0.1.5" +version = "0.1.10" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_core-0.1.5-py3-none-any.whl", hash = "sha256:e249c2cc858c5e3bc890e3f0aa91486a23b4a2845579c29311dc85f04dae8937"}, - {file = "langchain_core-0.1.5.tar.gz", hash = "sha256:a5d44dddac565c35dd3a23d4605405e8d4bb90604fc0713f13a56982a5082d14"}, + {file = "langchain_core-0.1.10-py3-none-any.whl", hash = "sha256:d89952f6d0766cfc88d9f1e25b84d56f8d7bd63a45ad8ec1a9a038c9b49df16d"}, + {file = "langchain_core-0.1.10.tar.gz", hash = "sha256:3c9e1383264c102fcc6f865700dbb9416c4931a25d0ac2195f6311c6b867aa17"}, ] [package.dependencies] @@ -753,15 +753,32 @@ tenacity = ">=8.1.0,<9.0.0" [package.extras] extended-testing = ["jinja2 (>=3,<4)"] +[[package]] +name = "langchain-openai" +version = "0.0.2.post1" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_openai-0.0.2.post1-py3-none-any.whl", hash = "sha256:ba468b94c23da9d8ccefe5d5a3c1c65b4b9702292523e53acc689a9110022e26"}, + {file = "langchain_openai-0.0.2.post1.tar.gz", hash = "sha256:f8e78db4a663feeac71d9f036b9422406c199ea3ef4c97d99ff392c93530e073"}, +] + +[package.dependencies] +langchain-core = ">=0.1.7,<0.2" +numpy = ">=1,<2" +openai = ">=1.6.1,<2.0.0" +tiktoken = ">=0.5.2,<0.6.0" + [[package]] name = "langsmith" -version = "0.0.77" +version = "0.0.80" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.77-py3-none-any.whl", hash = "sha256:750c0aa9177240c64e131d831e009ed08dd59038f7cabbd0bbcf62ccb7c8dcac"}, - {file = "langsmith-0.0.77.tar.gz", hash = "sha256:c4c8d3a96ad8671a41064f3ccc673e2e22a4153e823b19f915c9c9b8a4f33a2c"}, + {file = "langsmith-0.0.80-py3-none-any.whl", hash = "sha256:dee1c6ef9e8241b82a8851926624269954d0ff8e22d82e32e73455f387f4e245"}, + {file = "langsmith-0.0.80.tar.gz", hash = "sha256:6d22ee07eb41c65b3f5166b20041a026714952497d9e80d5be6879d3a5c14d84"}, ] [package.dependencies] @@ -782,131 +799,24 @@ files = [ [package.dependencies] python-dotenv = "*" -[[package]] -name = "lxml" -version = "4.9.4" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e214025e23db238805a600f1f37bf9f9a15413c7bf5f9d6ae194f84980c78722"}, - {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec53a09aee61d45e7dbe7e91252ff0491b6b5fee3d85b2d45b173d8ab453efc1"}, - {file = "lxml-4.9.4-cp27-cp27m-win32.whl", hash = "sha256:7d1d6c9e74c70ddf524e3c09d9dc0522aba9370708c2cb58680ea40174800013"}, - {file = "lxml-4.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:cb53669442895763e61df5c995f0e8361b61662f26c1b04ee82899c2789c8f69"}, - {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:647bfe88b1997d7ae8d45dabc7c868d8cb0c8412a6e730a7651050b8c7289cf2"}, - {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4d973729ce04784906a19108054e1fd476bc85279a403ea1a72fdb051c76fa48"}, - {file = "lxml-4.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:056a17eaaf3da87a05523472ae84246f87ac2f29a53306466c22e60282e54ff8"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aaa5c173a26960fe67daa69aa93d6d6a1cd714a6eb13802d4e4bd1d24a530644"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:647459b23594f370c1c01768edaa0ba0959afc39caeeb793b43158bb9bb6a663"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bdd9abccd0927673cffe601d2c6cdad1c9321bf3437a2f507d6b037ef91ea307"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:00e91573183ad273e242db5585b52670eddf92bacad095ce25c1e682da14ed91"}, - {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a602ed9bd2c7d85bd58592c28e101bd9ff9c718fbde06545a70945ffd5d11868"}, - {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de362ac8bc962408ad8fae28f3967ce1a262b5d63ab8cefb42662566737f1dc7"}, - {file = "lxml-4.9.4-cp310-cp310-win32.whl", hash = "sha256:33714fcf5af4ff7e70a49731a7cc8fd9ce910b9ac194f66eaa18c3cc0a4c02be"}, - {file = "lxml-4.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:d3caa09e613ece43ac292fbed513a4bce170681a447d25ffcbc1b647d45a39c5"}, - {file = "lxml-4.9.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:359a8b09d712df27849e0bcb62c6a3404e780b274b0b7e4c39a88826d1926c28"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:43498ea734ccdfb92e1886dfedaebeb81178a241d39a79d5351ba2b671bff2b2"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4855161013dfb2b762e02b3f4d4a21cc7c6aec13c69e3bffbf5022b3e708dd97"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c71b5b860c5215fdbaa56f715bc218e45a98477f816b46cfde4a84d25b13274e"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9a2b5915c333e4364367140443b59f09feae42184459b913f0f41b9fed55794a"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d82411dbf4d3127b6cde7da0f9373e37ad3a43e89ef374965465928f01c2b979"}, - {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:273473d34462ae6e97c0f4e517bd1bf9588aa67a1d47d93f760a1282640e24ac"}, - {file = 
"lxml-4.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:389d2b2e543b27962990ab529ac6720c3dded588cc6d0f6557eec153305a3622"}, - {file = "lxml-4.9.4-cp311-cp311-win32.whl", hash = "sha256:8aecb5a7f6f7f8fe9cac0bcadd39efaca8bbf8d1bf242e9f175cbe4c925116c3"}, - {file = "lxml-4.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:c7721a3ef41591341388bb2265395ce522aba52f969d33dacd822da8f018aff8"}, - {file = "lxml-4.9.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:dbcb2dc07308453db428a95a4d03259bd8caea97d7f0776842299f2d00c72fc8"}, - {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:01bf1df1db327e748dcb152d17389cf6d0a8c5d533ef9bab781e9d5037619229"}, - {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e8f9f93a23634cfafbad6e46ad7d09e0f4a25a2400e4a64b1b7b7c0fbaa06d9d"}, - {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3f3f00a9061605725df1816f5713d10cd94636347ed651abdbc75828df302b20"}, - {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:953dd5481bd6252bd480d6ec431f61d7d87fdcbbb71b0d2bdcfc6ae00bb6fb10"}, - {file = "lxml-4.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:f1faee2a831fe249e1bae9cbc68d3cd8a30f7e37851deee4d7962b17c410dd56"}, - {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:23d891e5bdc12e2e506e7d225d6aa929e0a0368c9916c1fddefab88166e98b20"}, - {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e96a1788f24d03e8d61679f9881a883ecdf9c445a38f9ae3f3f193ab6c591c66"}, - {file = "lxml-4.9.4-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:5557461f83bb7cc718bc9ee1f7156d50e31747e5b38d79cf40f79ab1447afd2d"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:fdb325b7fba1e2c40b9b1db407f85642e32404131c08480dd652110fc908561b"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d74d4a3c4b8f7a1f676cedf8e84bcc57705a6d7925e6daef7a1e54ae543a197"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ac7674d1638df129d9cb4503d20ffc3922bd463c865ef3cb412f2c926108e9a4"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:ddd92e18b783aeb86ad2132d84a4b795fc5ec612e3545c1b687e7747e66e2b53"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bd9ac6e44f2db368ef8986f3989a4cad3de4cd55dbdda536e253000c801bcc7"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bc354b1393dce46026ab13075f77b30e40b61b1a53e852e99d3cc5dd1af4bc85"}, - {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f836f39678cb47c9541f04d8ed4545719dc31ad850bf1832d6b4171e30d65d23"}, - {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9c131447768ed7bc05a02553d939e7f0e807e533441901dd504e217b76307745"}, - {file = "lxml-4.9.4-cp36-cp36m-win32.whl", hash = "sha256:bafa65e3acae612a7799ada439bd202403414ebe23f52e5b17f6ffc2eb98c2be"}, - {file = "lxml-4.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6197c3f3c0b960ad033b9b7d611db11285bb461fc6b802c1dd50d04ad715c225"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:7b378847a09d6bd46047f5f3599cdc64fcb4cc5a5a2dd0a2af610361fbe77b16"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = 
"sha256:1343df4e2e6e51182aad12162b23b0a4b3fd77f17527a78c53f0f23573663545"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6dbdacf5752fbd78ccdb434698230c4f0f95df7dd956d5f205b5ed6911a1367c"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:506becdf2ecaebaf7f7995f776394fcc8bd8a78022772de66677c84fb02dd33d"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca8e44b5ba3edb682ea4e6185b49661fc22b230cf811b9c13963c9f982d1d964"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9d9d5726474cbbef279fd709008f91a49c4f758bec9c062dfbba88eab00e3ff9"}, - {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bbdd69e20fe2943b51e2841fc1e6a3c1de460d630f65bde12452d8c97209464d"}, - {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8671622256a0859f5089cbe0ce4693c2af407bc053dcc99aadff7f5310b4aa02"}, - {file = "lxml-4.9.4-cp37-cp37m-win32.whl", hash = "sha256:dd4fda67f5faaef4f9ee5383435048ee3e11ad996901225ad7615bc92245bc8e"}, - {file = "lxml-4.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6bee9c2e501d835f91460b2c904bc359f8433e96799f5c2ff20feebd9bb1e590"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:1f10f250430a4caf84115b1e0f23f3615566ca2369d1962f82bef40dd99cd81a"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b505f2bbff50d261176e67be24e8909e54b5d9d08b12d4946344066d66b3e43"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1449f9451cd53e0fd0a7ec2ff5ede4686add13ac7a7bfa6988ff6d75cff3ebe2"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4ece9cca4cd1c8ba889bfa67eae7f21d0d1a2e715b4d5045395113361e8c533d"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59bb5979f9941c61e907ee571732219fa4774d5a18f3fa5ff2df963f5dfaa6bc"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b1980dbcaad634fe78e710c8587383e6e3f61dbe146bcbfd13a9c8ab2d7b1192"}, - {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9ae6c3363261021144121427b1552b29e7b59de9d6a75bf51e03bc072efb3c37"}, - {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bcee502c649fa6351b44bb014b98c09cb00982a475a1912a9881ca28ab4f9cd9"}, - {file = "lxml-4.9.4-cp38-cp38-win32.whl", hash = "sha256:a8edae5253efa75c2fc79a90068fe540b197d1c7ab5803b800fccfe240eed33c"}, - {file = "lxml-4.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:701847a7aaefef121c5c0d855b2affa5f9bd45196ef00266724a80e439220e46"}, - {file = "lxml-4.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:f610d980e3fccf4394ab3806de6065682982f3d27c12d4ce3ee46a8183d64a6a"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aa9b5abd07f71b081a33115d9758ef6077924082055005808f68feccb27616bd"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:365005e8b0718ea6d64b374423e870648ab47c3a905356ab6e5a5ff03962b9a9"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:16b9ec51cc2feab009e800f2c6327338d6ee4e752c76e95a35c4465e80390ccd"}, - {file = 
"lxml-4.9.4-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a905affe76f1802edcac554e3ccf68188bea16546071d7583fb1b693f9cf756b"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd814847901df6e8de13ce69b84c31fc9b3fb591224d6762d0b256d510cbf382"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91bbf398ac8bb7d65a5a52127407c05f75a18d7015a270fdd94bbcb04e65d573"}, - {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f99768232f036b4776ce419d3244a04fe83784bce871b16d2c2e984c7fcea847"}, - {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bb5bd6212eb0edfd1e8f254585290ea1dadc3687dd8fd5e2fd9a87c31915cdab"}, - {file = "lxml-4.9.4-cp39-cp39-win32.whl", hash = "sha256:88f7c383071981c74ec1998ba9b437659e4fd02a3c4a4d3efc16774eb108d0ec"}, - {file = "lxml-4.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:936e8880cc00f839aa4173f94466a8406a96ddce814651075f95837316369899"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-macosx_11_0_x86_64.whl", hash = "sha256:f6c35b2f87c004270fa2e703b872fcc984d714d430b305145c39d53074e1ffe0"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:606d445feeb0856c2b424405236a01c71af7c97e5fe42fbc778634faef2b47e4"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1bdcbebd4e13446a14de4dd1825f1e778e099f17f79718b4aeaf2403624b0f7"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0a08c89b23117049ba171bf51d2f9c5f3abf507d65d016d6e0fa2f37e18c0fc5"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:232fd30903d3123be4c435fb5159938c6225ee8607b635a4d3fca847003134ba"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:231142459d32779b209aa4b4d460b175cadd604fed856f25c1571a9d78114771"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:520486f27f1d4ce9654154b4494cf9307b495527f3a2908ad4cb48e4f7ed7ef7"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:562778586949be7e0d7435fcb24aca4810913771f845d99145a6cee64d5b67ca"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a9e7c6d89c77bb2770c9491d988f26a4b161d05c8ca58f63fb1f1b6b9a74be45"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:786d6b57026e7e04d184313c1359ac3d68002c33e4b1042ca58c362f1d09ff58"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95ae6c5a196e2f239150aa4a479967351df7f44800c93e5a975ec726fef005e2"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:9b556596c49fa1232b0fff4b0e69b9d4083a502e60e404b44341e2f8fb7187f5"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:cc02c06e9e320869d7d1bd323df6dd4281e78ac2e7f8526835d3d48c69060683"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:857d6565f9aa3464764c2cb6a2e3c2e75e1970e877c188f4aeae45954a314e0c"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c42ae7e010d7d6bc51875d768110c10e8a59494855c3d4c348b068f5fb81fdcd"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:f10250bb190fb0742e3e1958dd5c100524c2cc5096c67c8da51233f7448dc137"}, - {file = "lxml-4.9.4.tar.gz", hash = "sha256:b1541e50b78e15fa06a2670157a1962ef06591d4c998b998047fff5e3236880e"}, -] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (==0.29.37)"] - [[package]] name = "marshmallow" -version = "3.20.1" +version = "3.20.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, - {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, + {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, + {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"] +dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["pre-commit (>=2.4,<4.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -1061,13 +971,13 @@ files = [ [[package]] name = "openai" -version = "1.6.1" +version = "1.7.2" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.6.1-py3-none-any.whl", hash = "sha256:bc9f774838d67ac29fb24cdeb2d58faf57de8b311085dcd1348f7aa02a96c7ee"}, - {file = "openai-1.6.1.tar.gz", hash = "sha256:d553ca9dbf9486b08e75b09e8671e4f638462aaadccfced632bf490fc3d75fa2"}, + {file = "openai-1.7.2-py3-none-any.whl", hash = "sha256:8f41b90a762f5fd9d182b45851041386fed94c8ad240a70abefee61a68e0ef53"}, + {file = "openai-1.7.2.tar.gz", hash = "sha256:c73c78878258b07f1b468b0602c6591f25a1478f49ecb90b9bd44b7cc80bce73"}, ] [package.dependencies] @@ -1108,91 +1018,6 @@ files = [ codegen = ["lxml", "requests", "yapf"] testing = ["coverage", "flake8", "flake8-comprehensions", "flake8-deprecated", "flake8-import-order", "flake8-print", "flake8-quotes", "flake8-rst-docstrings", "flake8-tuple", "yapf"] -[[package]] -name = "pillow" -version = "10.2.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, - {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, - {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, - {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, - {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, - {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, - {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, - {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, - {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, - {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, - {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, - {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, - {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, - {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, - {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, - {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, - {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, - {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, - {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, - {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = 
"sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, - {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, - {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, - {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, - {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - [[package]] name = "pycodestyle" version = "2.11.1" @@ -1351,21 +1176,6 @@ files = [ {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] -[[package]] -name = "pytesseract" -version = "0.3.10" -description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, - {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, 
-] - -[package.dependencies] -packaging = ">=21.3" -Pillow = ">=8.0.0" - [[package]] name = "python-dotenv" version = "1.0.0" @@ -1904,5 +1714,5 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" -python-versions = "^3.11" -content-hash = "3c71c6e220334d57cf256fefd64d9668c297166751875e1958da1d8c775d6803" +python-versions = "~3.11" +content-hash = "9f2ababa8b9b88a9495b65e23ef893e466b2f49c066fec21f38a693839e02218" diff --git a/pyproject.toml b/pyproject.toml index c3a1154..0f92a8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,13 @@ [tool.poetry] name = "guidance-engine" -version = "0.5.2" +version = "0.6.0" description = "Alkemio Generative AI Guidance Engine" authors = ["Alkemio BV "] license = "EUPL-1.2" readme = "README.md" [tool.poetry.dependencies] -python = "^3.11" +python = "~3.11" langchain = "^0.0.354" langchain-core= "^0.1.5" langchain-community = ">=0.0.8" @@ -15,12 +15,12 @@ python-dotenv = "^1.0.0" faiss-cpu = "^1.7.4" openai = "^1.3.8" load-dotenv = "^0.1.0" -pytesseract = "^0.3.10" -tiktoken = "^0.5.2" beautifulsoup4 = "^4.12.2" -lxml = "^4.9.3" async-timeout = "^4.0.3" aio-pika = "^9.3.1" +aiormq = "^6.7.7" +tiktoken = "^0.5.2" +langchain-openai = "^0.0.2" [tool.poetry.group.dev.dependencies]