Skip to content

Commit

Permalink
Revert "Add fix for include_usage completion chunk"
Browse files Browse the repository at this point in the history
This reverts commit 682da59.
  • Loading branch information
Amnah199 committed Mar 4, 2025
1 parent 682da59 commit c9ff9a0
Showing 1 changed file with 5 additions and 26 deletions.
31 changes: 5 additions & 26 deletions haystack/components/generators/chat/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -416,16 +416,12 @@ async def _handle_async_stream_response(
chunk = None

async for chunk in chat_completion: # pylint: disable=not-an-iterable
# choices is an empty array for usage_chunk when include_usage is set to True
if chunk.usage is not None:
chunk_delta = self._convert_usage_chunk_to_streaming_chunk(chunk)

else:
assert len(chunk.choices) == 1, "Streaming responses should have only one choice."
chunk_delta: StreamingChunk = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
assert len(chunk.choices) == 1, "Streaming responses should have only one choice."
chunk_delta: StreamingChunk = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
chunks.append(chunk_delta)

await callback(chunk_delta)

return [self._convert_streaming_chunks_to_chat_message(chunk, chunks)]

def _check_finish_reason(self, meta: Dict[str, Any]) -> None:
Expand Down Expand Up @@ -490,14 +486,12 @@ def _convert_streaming_chunks_to_chat_message(
_arguments=call_data["arguments"],
)

finish_reason = (chunks[-2] if chunk.usage else chunks[-1]).meta.get("finish_reason")

meta = {
"model": chunk.model,
"index": 0,
"finish_reason": finish_reason,
"finish_reason": chunk.choices[0].finish_reason,
"completion_start_time": chunks[0].meta.get("received_at"), # first chunk received
"usage": chunk.usage or {},
"usage": {}, # we don't have usage data for streaming responses
}

return ChatMessage.from_assistant(text=text or None, tool_calls=tool_calls, meta=meta)
Expand Down Expand Up @@ -565,18 +559,3 @@ def _convert_chat_completion_chunk_to_streaming_chunk(self, chunk: ChatCompletio
}
)
return chunk_message

def _convert_usage_chunk_to_streaming_chunk(self, chunk: ChatCompletionChunk) -> StreamingChunk:
    """
    Build a StreamingChunk carrying only usage metadata.

    OpenAI emits a final chunk with usage statistics (and no content) when
    ``include_usage`` is enabled; this wraps that chunk for the streaming pipeline.

    :param chunk: The usage chunk returned by the OpenAI API.
    :returns:
        The StreamingChunk.
    """
    usage_message = StreamingChunk(content="")
    usage_meta = {
        "model": chunk.model,
        "usage": chunk.usage,
        "received_at": datetime.now().isoformat(),
    }
    usage_message.meta.update(usage_meta)
    return usage_message

0 comments on commit c9ff9a0

Please sign in to comment.