Skip to content

Commit

Permalink
Add fix for include_usage completion chunk
Browse files Browse the repository at this point in the history
  • Loading branch information
Amnah199 committed Mar 4, 2025
1 parent fbed234 commit 682da59
Showing 1 changed file with 26 additions and 5 deletions.
31 changes: 26 additions & 5 deletions haystack/components/generators/chat/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -416,12 +416,16 @@ async def _handle_async_stream_response(
chunk = None

async for chunk in chat_completion: # pylint: disable=not-an-iterable
assert len(chunk.choices) == 1, "Streaming responses should have only one choice."
chunk_delta: StreamingChunk = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
# choices is an empty array for usage_chunk when include_usage is set to True
if chunk.usage is not None:
chunk_delta = self._convert_usage_chunk_to_streaming_chunk(chunk)

else:
assert len(chunk.choices) == 1, "Streaming responses should have only one choice."
chunk_delta: StreamingChunk = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
chunks.append(chunk_delta)

await callback(chunk_delta)

return [self._convert_streaming_chunks_to_chat_message(chunk, chunks)]

def _check_finish_reason(self, meta: Dict[str, Any]) -> None:
Expand Down Expand Up @@ -486,12 +490,14 @@ def _convert_streaming_chunks_to_chat_message(
_arguments=call_data["arguments"],
)

finish_reason = (chunks[-2] if chunk.usage else chunks[-1]).meta.get("finish_reason")

meta = {
"model": chunk.model,
"index": 0,
"finish_reason": chunk.choices[0].finish_reason,
"finish_reason": finish_reason,
"completion_start_time": chunks[0].meta.get("received_at"), # first chunk received
"usage": {}, # we don't have usage data for streaming responses
"usage": chunk.usage or {},
}

return ChatMessage.from_assistant(text=text or None, tool_calls=tool_calls, meta=meta)
Expand Down Expand Up @@ -559,3 +565,18 @@ def _convert_chat_completion_chunk_to_streaming_chunk(self, chunk: ChatCompletio
}
)
return chunk_message

def _convert_usage_chunk_to_streaming_chunk(self, chunk: ChatCompletionChunk) -> StreamingChunk:
    """
    Converts the usage chunk from the OpenAI API to a StreamingChunk.

    :param chunk: The usage chunk returned by the OpenAI API.
    :returns:
        The StreamingChunk.
    """
    # Build the metadata first: the usage chunk carries no text content,
    # only the model name, token-usage payload, and a receipt timestamp.
    usage_meta = {
        "model": chunk.model,
        "usage": chunk.usage,
        "received_at": datetime.now().isoformat(),
    }
    streaming_chunk = StreamingChunk(content="")
    streaming_chunk.meta.update(usage_meta)
    return streaming_chunk

0 comments on commit 682da59

Please sign in to comment.