Skip to content

Commit

Permalink
Add check for missing function response
Browse files Browse the repository at this point in the history
  • Loading branch information
lalalune committed Jul 20, 2023
1 parent 2a107a8 commit 10818e7
Showing 1 changed file with 11 additions and 4 deletions.
15 changes: 11 additions & 4 deletions easycompletion/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,11 @@ def validate_functions(response, functions, function_call):
Usage:
isValid = validate_functions(response, functions, function_call)
"""
# Extract the function call from the response
response_function_call = response["choices"][0]["message"]["function_call"]
response_function_call = response["choices"][0]["message"].get(
"function_call", None
)
if response_function_call is None:
return False

# If function_call is not "auto" and the name does not match with the response, return False
if (
Expand Down Expand Up @@ -193,7 +196,9 @@ def openai_text_call(
if total_tokens > chunk_length and "16k" not in model:
model = long_text_model
if not os.environ.get("SUPPRESS_WARNINGS"):
print("Warning: Message is long. Using 16k model (to hide this message, set SUPPRESS_WARNINGS=1)")
print(
"Warning: Message is long. Using 16k model (to hide this message, set SUPPRESS_WARNINGS=1)"
)

# If text is too long even for long text model, return None
if total_tokens > (16384 - chunk_length):
Expand Down Expand Up @@ -341,7 +346,9 @@ def openai_function_call(
if total_tokens > chunk_length and "16k" not in model:
model = long_text_model
if not os.environ.get("SUPPRESS_WARNINGS"):
print("Warning: Message is long. Using 16k model (to hide this message, set SUPPRESS_WARNINGS=1)")
print(
"Warning: Message is long. Using 16k model (to hide this message, set SUPPRESS_WARNINGS=1)"
)

# Check if the total number of tokens exceeds the maximum allowable tokens for the model
if total_tokens > (16384 - chunk_length):
Expand Down

0 comments on commit 10818e7

Please sign in to comment.