
Commit

Revert "cleanup"
This reverts commit a7e06d0.
vivi committed Oct 25, 2023
1 parent 6cd2a00 · commit f7a9540
Showing 1 changed file with 7 additions and 9 deletions.
memgpt/local_llm/chat_completion_proxy.py (16 changes: 7 additions & 9 deletions)
@@ -5,14 +5,12 @@
 import json
 
 from .webui.api import get_webui_completion
-from .llm_chat_completion_wrappers import airoboros, dolphin
+from .llm_chat_completion_wrappers import airoboros
 from .utils import DotDict
 
 HOST = os.getenv("OPENAI_API_BASE")
 HOST_TYPE = os.getenv("BACKEND_TYPE")  # default None == ChatCompletion
 DEBUG = False
-# DEBUG = True
-DEFAULT_WRAPPER = airoboros.Airoboros21InnerMonologueWrapper()
 
 
 async def get_chat_completion(
@@ -24,14 +22,14 @@ async def get_chat_completion(
     if function_call != "auto":
         raise ValueError(f"function_call == {function_call} not supported (auto only)")
 
-    if model == "airoboros-l2-70b-2.1":
-        llm_wrapper = airoboros.Airoboros21InnerMonologueWrapper()
-    elif model == "dolphin-2.1-mistral-7b":
-        llm_wrapper = dolphin.Dolphin21MistralWrapper()
+    if model == "airoboros_v2.1":
+        llm_wrapper = airoboros.Airoboros21Wrapper()
     else:
         # Warn the user that we're using the fallback
-        print(f"Warning: no wrapper specified for local LLM, using the default wrapper")
-        llm_wrapper = DEFAULT_WRAPPER
+        print(
+            f"Warning: could not find an LLM wrapper for {model}, using the airoboros wrapper"
+        )
+        llm_wrapper = airoboros.Airoboros21Wrapper()
 
     # First step: turn the message sequence into a prompt that the model expects
     prompt = llm_wrapper.chat_completion_to_prompt(messages, functions)
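For context, the restored behavior selects the airoboros wrapper both for the exact model name "airoboros_v2.1" and as the fallback for any other local model. Below is a minimal, self-contained sketch of that selection logic; the select_wrapper helper and the stub Airoboros21Wrapper class are illustrative stand-ins, not code from this repository.

# Illustrative sketch only -- mirrors the wrapper-selection fallback restored
# by this revert; select_wrapper and the stub class are not MemGPT code.

class Airoboros21Wrapper:
    """Stand-in for the repo's airoboros.Airoboros21Wrapper."""
    pass


def select_wrapper(model: str) -> Airoboros21Wrapper:
    if model == "airoboros_v2.1":
        return Airoboros21Wrapper()
    # Any other model name falls back to the airoboros wrapper with a warning,
    # matching the restored else branch above.
    print(f"Warning: could not find an LLM wrapper for {model}, using the airoboros wrapper")
    return Airoboros21Wrapper()


if __name__ == "__main__":
    select_wrapper("airoboros_v2.1")       # exact match: no warning
    select_wrapper("dolphin-2.1-mistral")  # anything else: warns, then uses airoboros

Note that with the dolphin wrapper removed by this revert, dolphin model names also take the warning-plus-fallback path shown above.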
