Skip to content

Commit 5c2e244

Browse files
authored
fix: fix mypy issue in OpenAIChatGenerator that appears since mypy 1.16 (#9456)
- Fix mypy issue that appears with mypy 1.16
- Add release note
1 parent aa3d046 commit 5c2e244

File tree

2 files changed

+18
-2
lines changed

2 files changed

+18
-2
lines changed

haystack/components/generators/chat/openai.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -421,27 +421,39 @@ def _handle_stream_response(self, chat_completion: Stream, callback: SyncStreami
421421
chunks: List[StreamingChunk] = []
422422
chunk = None
423423
chunk_delta: StreamingChunk
424+
last_chunk: Optional[ChatCompletionChunk] = None
424425

425426
for chunk in chat_completion: # pylint: disable=not-an-iterable
426427
assert len(chunk.choices) <= 1, "Streaming responses should have at most one choice."
427428
chunk_delta = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
428429
chunks.append(chunk_delta)
429430
callback(chunk_delta)
430-
return [self._convert_streaming_chunks_to_chat_message(chunk, chunks)]
431+
last_chunk = chunk
432+
433+
if not last_chunk:
434+
raise ValueError("No chunks received from the stream")
435+
436+
return [self._convert_streaming_chunks_to_chat_message(last_chunk, chunks)]
431437

432438
async def _handle_async_stream_response(
433439
self, chat_completion: AsyncStream, callback: AsyncStreamingCallbackT
434440
) -> List[ChatMessage]:
435441
chunks: List[StreamingChunk] = []
436442
chunk = None
437443
chunk_delta: StreamingChunk
444+
last_chunk: Optional[ChatCompletionChunk] = None
438445

439446
async for chunk in chat_completion: # pylint: disable=not-an-iterable
440447
assert len(chunk.choices) <= 1, "Streaming responses should have at most one choice."
441448
chunk_delta = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
442449
chunks.append(chunk_delta)
443450
await callback(chunk_delta)
444-
return [self._convert_streaming_chunks_to_chat_message(chunk, chunks)]
451+
last_chunk = chunk
452+
453+
if not last_chunk:
454+
raise ValueError("No chunks received from the stream")
455+
456+
return [self._convert_streaming_chunks_to_chat_message(last_chunk, chunks)]
445457

446458
def _check_finish_reason(self, meta: Dict[str, Any]) -> None:
447459
if meta["finish_reason"] == "length":
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
---
2+
fixes:
3+
- |
4+
Fixed a mypy issue in the OpenAIChatGenerator and its handling of stream responses. This issue only occurs with mypy >=1.16.0.

0 commit comments

Comments (0)