-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Deployed ccf0e63 with MkDocs version: 1.6.0
- Loading branch information
0 parents
commit 29d630b
Showing
84 changed files
with
61,287 additions
and
0 deletions.
There are no files selected for viewing
Empty file.
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
1,485 changes: 1,485 additions & 0 deletions
1,485
Rag Pipeline/Developer Tutorials/change data input/index.html
Large diffs are not rendered by default.
Oops, something went wrong.
1,475 changes: 1,475 additions & 0 deletions
1,475
Rag Pipeline/Developer Tutorials/change model/index.html
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
# ---
# jupyter:
#   jupytext:
#     text_representation:
#       extension: .py
#       format_name: light
#       format_version: '1.5'
#     jupytext_version: 1.16.3
#   kernelspec:
#     display_name: openml
#     language: python
#     name: python3
# ---

# # Tutorial on changing models
# - How would you use a different embedding and llm model?

from __future__ import annotations

import chromadb

from backend.modules.rag_llm import QASetup
from backend.modules.utils import load_config_and_device

# ## Initial config

# Load the shared pipeline config (and pick the compute device), then
# point it at the demo data / vector-store directories for this tutorial.
config = load_config_and_device("../../../backend/config.json")
config["persist_dir"] = "../../data/doc_examples/chroma_db/"
config["data_dir"] = "../../data/doc_examples/"
config["type_of_data"] = "dataset"
config["training"] = True
config["test_subset"] = True  # set this to false while training, this is for demo
# load the persistent database using ChromaDB
client = chromadb.PersistentClient(path=config["persist_dir"])
print(config)

# ## Embedding model
# - Pick a model from HF

config["embedding_model"] = "BAAI/bge-large-en-v1.5"

# ## LLM model

# - Pick a model from Ollama - https://ollama.com/library?sort=popular
# - eg : mistral
#

config["llm_model"] = "mistral"

# +
# Build the vector store + QA chain from the config above.  QASetup wires
# the embedding model, the ChromaDB client, and the LLM together.
qa_dataset_handler = QASetup(
    config=config,
    data_type=config["type_of_data"],
    client=client,
)

qa_dataset, _ = qa_dataset_handler.setup_vector_db_and_qa()
# -

# # IMPORTANT
# - Do NOT forget to change the model to the best model in ollama/get_ollama.sh
Oops, something went wrong.