Commit

Merge pull request #144 from giladbarnea/fix-openai-streaming-response-typeerror

Fix OpenAI handle_streaming_response TypeError
matankley authored Oct 16, 2023
2 parents edb07da + 134cd20 commit 0021901
Showing 2 changed files with 10 additions and 10 deletions.
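
For context, a minimal sketch of the failure mode implied by the commit title (an assumption; the traceback is not shown in the diff): subscripting a missing or None usage payload raises, while dict-style .get() access degrades to None, which is what the patched code below relies on.

    # Hypothetical reproduction (assumption; not code from this PR)
    usage = None                 # a mid-stream chunk may carry no usage data
    # usage["prompt_tokens"]     # TypeError: 'NoneType' object is not subscriptable

    usage = {"prompt_tokens": 12, "completion_tokens": 3}
    # usage["total_tokens"]      # KeyError if the field is absent
    usage.get("total_tokens")    # -> None instead of raising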
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "declarai"
-version = "0.1.12"
+version = "0.1.13"
 description = "Declarai, turning Python code into LLM tasks, easy to use, and production-ready."
 authors = ["Aviv Almashanu <[email protected]>"]
 readme = "README.md"
18 changes: 9 additions & 9 deletions src/declarai/operators/openai_operators/openai_llm.py
@@ -243,15 +243,15 @@ def handle_streaming_response(api_response: OpenAIObject) -> Iterator[LLMResponse]:
"""
response = {"role": None, "response": "", "raw_response": ""}

for r in api_response: # noqa
response["raw_response"] = r.to_dict_recursive()

delta = r.choices[0]["delta"]
response["model"] = r.model
if r.get('usage'):
response["prompt_tokens"] = r.usage["prompt_tokens"]
response["completion_tokens"] = r.usage["completion_tokens"]
response["total_tokens"] = r.usage["total_tokens"]
chunk: OpenAIObject
for chunk in api_response: # noqa
response["raw_response"] = chunk.to_dict_recursive()
delta = chunk.choices[0]["delta"]
response["model"] = chunk.model
if chunk.get("usage"):
response["prompt_tokens"] = chunk.usage.get("prompt_tokens")
response["completion_tokens"] = chunk.usage.get("completion_tokens")
response["total_tokens"] = chunk.usage.get("total_tokens")

if "role" in delta:
response["role"] = delta["role"]
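
For reference, a hedged sketch of how the patched generator might be driven end to end. The pre-1.0 openai client call, model id, and prompt are illustrative assumptions; only handle_streaming_response's signature comes from the diff, and the sketch assumes LLMResponse exposes the accumulated text as .response.

    # Hypothetical usage (assumption; not part of the commit)
    import openai

    api_response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,  # the stream yields OpenAIObject chunks, matching the loop above
    )
    for llm_response in handle_streaming_response(api_response):
        print(llm_response.response)  # text accumulated so far (assumed attribute)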
