Skip to content

Commit

Permalink
Validate ChatCompletion to avoid unexpected bugs
Browse files Browse the repository at this point in the history
Signed-off-by: Sambhav Kothari <[email protected]>
  • Loading branch information
sambhav committed Dec 26, 2024
1 parent 875d5eb commit 6ce4aa0
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 0 deletions.
2 changes: 2 additions & 0 deletions pydantic_ai_slim/pydantic_ai/models/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ async def _completions_create(
@staticmethod
def _process_response(response: chat.ChatCompletion) -> ModelResponse:
"""Process a non-streamed response, and prepare a message to return."""
if not response.created:
raise UnexpectedModelBehavior('Response has no timestamp', body=response.to_json())
timestamp = datetime.fromtimestamp(response.created, tz=timezone.utc)
choice = response.choices[0]
items: list[ModelResponsePart] = []
Expand Down
9 changes: 9 additions & 0 deletions tests/models/test_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -473,6 +473,15 @@ async def test_no_content(allow_model_requests: None):
pass


async def test_no_sync_content(allow_model_requests: None):
    """A non-streamed completion whose `created` timestamp is falsy raises `UnexpectedModelBehavior`."""
    # Build a ChatCompletion with created=0 — the condition the model layer now rejects.
    empty_completion = chat.ChatCompletion(
        id='',
        choices=[],
        created=0,
        model='',
        object='chat.completion',
    )
    model = OpenAIModel('gpt-4', openai_client=MockOpenAI.create_mock(empty_completion))
    agent = Agent(model, result_type=MyTypedDict)

    with pytest.raises(UnexpectedModelBehavior, match='Response has no timestamp'):
        await agent.run('')


async def test_no_delta(allow_model_requests: None):
stream = (
chunk([]),
Expand Down

0 comments on commit 6ce4aa0

Please sign in to comment.