diff --git a/pyproject.toml b/pyproject.toml
index 963a8cb1aa..7b32c49f3f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -140,7 +140,7 @@ replacement = '[\1](https://github.com/openai/openai-python/tree/main/\g<2>)'
 
 [tool.pytest.ini_options]
 testpaths = ["tests"]
-addopts = "--tb=short -n auto"
+addopts = "--tb=short"
 xfail_strict = true
 asyncio_mode = "auto"
 asyncio_default_fixture_loop_scope = "session"
@@ -224,4 +224,4 @@ known-first-party = ["openai", "tests"]
 "bin/**.py" = ["T201", "T203"]
 "scripts/**.py" = ["T201", "T203"]
 "tests/**.py" = ["T201", "T203"]
-"examples/**.py" = ["T201", "T203"]
+"examples/**.py" = ["T201", "T203"]
\ No newline at end of file
diff --git a/src/openai/resources/chat/completions/completions.py b/src/openai/resources/chat/completions/completions.py
index a6b89fc833..2d7c289de1 100644
--- a/src/openai/resources/chat/completions/completions.py
+++ b/src/openai/resources/chat/completions/completions.py
@@ -2358,4 +2358,4 @@ def validate_response_format(response_format: object) -> None:
     if inspect.isclass(response_format) and issubclass(response_format, pydantic.BaseModel):
         raise TypeError(
             "You tried to pass a `BaseModel` class to `chat.completions.create()`; You must use `beta.chat.completions.parse()` instead"
-        )
+        )
\ No newline at end of file
diff --git a/src/openai/types/chat/__init__.py b/src/openai/types/chat/__init__.py
index 0945bcad11..02c3c53609 100644
--- a/src/openai/types/chat/__init__.py
+++ b/src/openai/types/chat/__init__.py
@@ -48,6 +48,8 @@
 )
 from .chat_completion_message_tool_call_param import (
     ChatCompletionMessageToolCallParam as ChatCompletionMessageToolCallParam,
+    FunctionCallParamDetails as FunctionCallParamDetails,
+    FunctionToolCallParam as FunctionToolCallParam,
 )
 from .chat_completion_named_tool_choice_param import (
     ChatCompletionNamedToolChoiceParam as ChatCompletionNamedToolChoiceParam,
 )
@@ -70,3 +72,60 @@
 from .chat_completion_content_part_input_audio_param import (
     ChatCompletionContentPartInputAudioParam as ChatCompletionContentPartInputAudioParam,
 )
+from .code_interpreter_tool_call_param import (
+    CodeInterpreterCallParam as CodeInterpreterCallParam,
+    CodeInterpreterOutputImageFileParam as CodeInterpreterOutputImageFileParam,
+    CodeInterpreterOutputImageParam as CodeInterpreterOutputImageParam,
+    CodeInterpreterOutputLogParam as CodeInterpreterOutputLogParam,
+    CodeInterpreterOutputParam as CodeInterpreterOutputParam,
+)
+
+__all__ = [
+    "ChatCompletion",
+    "ChatCompletionRole",
+    "ChatCompletionTool",
+    "ChatCompletionAudio",
+    "ChatCompletionChunk",
+    "CompletionListParams",
+    "ParsedChoice",
+    "ParsedChatCompletion",
+    "ParsedChatCompletionMessage",
+    "ChatCompletionDeleted",
+    "ChatCompletionMessage",
+    "ChatCompletionModality",
+    "CompletionCreateParams",
+    "CompletionUpdateParams",
+    "ParsedFunction",
+    "ParsedFunctionToolCall",
+    "ChatCompletionToolParam",
+    "ChatCompletionAudioParam",
+    "ChatCompletionMessageParam",
+    "ChatCompletionStoreMessage",
+    "ChatCompletionTokenLogprob",
+    "ChatCompletionReasoningEffort",
+    "ChatCompletionMessageToolCall",
+    "ChatCompletionContentPartParam",
+    "ChatCompletionToolMessageParam",
+    "ChatCompletionUserMessageParam",
+    "ChatCompletionStreamOptionsParam",
+    "ChatCompletionSystemMessageParam",
+    "ChatCompletionFunctionMessageParam",
+    "ChatCompletionAssistantMessageParam",
+    "ChatCompletionContentPartTextParam",
+    "ChatCompletionDeveloperMessageParam",
+    "FunctionToolCallParam",
+    "FunctionCallParamDetails",
+    "ChatCompletionMessageToolCallParam",
"ChatCompletionNamedToolChoiceParam", + "ChatCompletionContentPartImageParam", + "ChatCompletionPredictionContentParam", + "ChatCompletionToolChoiceOptionParam", + "ChatCompletionContentPartRefusalParam", + "ChatCompletionFunctionCallOptionParam", + "ChatCompletionContentPartInputAudioParam", + "CodeInterpreterCallParam", + "CodeInterpreterOutputImageFileParam", + "CodeInterpreterOutputImageParam", + "CodeInterpreterOutputLogParam", + "CodeInterpreterOutputParam", +] \ No newline at end of file diff --git a/src/openai/types/chat/chat_completion_message_tool_call_param.py b/src/openai/types/chat/chat_completion_message_tool_call_param.py index f616c363d0..676a9e4934 100644 --- a/src/openai/types/chat/chat_completion_message_tool_call_param.py +++ b/src/openai/types/chat/chat_completion_message_tool_call_param.py @@ -2,12 +2,15 @@ from __future__ import annotations +from typing import Union from typing_extensions import Literal, Required, TypedDict -__all__ = ["ChatCompletionMessageToolCallParam", "Function"] +from .code_interpreter_tool_call_param import CodeInterpreterCallParam +__all__ = ["FunctionToolCallParam", "FunctionCallParamDetails", "ChatCompletionMessageToolCallParam"] -class Function(TypedDict, total=False): + +class FunctionCallParamDetails(TypedDict, total=False): arguments: Required[str] """ The arguments to call the function with, as generated by the model in JSON @@ -20,12 +23,15 @@ class Function(TypedDict, total=False): """The name of the function to call.""" -class ChatCompletionMessageToolCallParam(TypedDict, total=False): +class FunctionToolCallParam(TypedDict, total=False): id: Required[str] """The ID of the tool call.""" - function: Required[Function] + function: Required[FunctionCallParamDetails] """The function that the model called.""" type: Required[Literal["function"]] """The type of the tool. Currently, only `function` is supported.""" + + +ChatCompletionMessageToolCallParam = Union[FunctionToolCallParam, CodeInterpreterCallParam] \ No newline at end of file diff --git a/src/openai/types/chat/code_interpreter_tool_call_param.py b/src/openai/types/chat/code_interpreter_tool_call_param.py new file mode 100644 index 0000000000..59f13ec3e1 --- /dev/null +++ b/src/openai/types/chat/code_interpreter_tool_call_param.py @@ -0,0 +1,52 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+
+from __future__ import annotations
+
+from typing import Iterable, Union
+from typing_extensions import Literal, Required, TypedDict
+
+__all__ = [
+    "CodeInterpreterOutputLogParam",
+    "CodeInterpreterOutputImageFileParam",
+    "CodeInterpreterOutputImageParam",
+    "CodeInterpreterOutputParam",
+    "CodeInterpreterCallParam",
+]
+
+
+class CodeInterpreterOutputLogParam(TypedDict, total=False):
+    type: Required[Literal["logs"]]
+    """Always 'logs' for this output type."""
+
+    logs: Required[str]
+    """The text output from the Code Interpreter tool call."""
+
+
+class CodeInterpreterOutputImageFileParam(TypedDict, total=False):
+    file_id: Required[str]
+    """The file ID of the image."""
+
+
+class CodeInterpreterOutputImageParam(TypedDict, total=False):
+    type: Required[Literal["image"]]
+    """Always 'image' for this output type."""
+
+    image: Required[CodeInterpreterOutputImageFileParam]
+    """The image output from the Code Interpreter tool call."""
+
+
+CodeInterpreterOutputParam = Union[CodeInterpreterOutputLogParam, CodeInterpreterOutputImageParam]
+
+
+class CodeInterpreterCallParam(TypedDict, total=False):
+    id: Required[str]
+    """The ID of the tool call."""
+
+    type: Required[Literal["code_interpreter"]]
+    """Always 'code_interpreter' for this type of tool call."""
+
+    code: Required[str]
+    """The input code for the Code Interpreter."""
+
+    outputs: Required[Iterable[CodeInterpreterOutputParam]]
+    """The outputs from the Code Interpreter tool call."""
diff --git a/src/openai/types/chat/test_chat_completion_message_params.py b/src/openai/types/chat/test_chat_completion_message_params.py
new file mode 100644
index 0000000000..586f5872fd
--- /dev/null
+++ b/src/openai/types/chat/test_chat_completion_message_params.py
@@ -0,0 +1,137 @@
+from __future__ import annotations
+
+import pytest  # noqa: F401
+from typing import Union
+
+from openai.types.chat import (
+    ChatCompletionAssistantMessageParam,
+    ChatCompletionMessageToolCallParam,  # noqa: F401
+    FunctionToolCallParam,  # noqa: F401
+    FunctionCallParamDetails,  # noqa: F401
+    CodeInterpreterCallParam,  # noqa: F401
+    CodeInterpreterOutputLogParam,  # noqa: F401
+    CodeInterpreterOutputImageParam,  # noqa: F401
+    CodeInterpreterOutputImageFileParam,  # noqa: F401
+)
+from openai.types.chat.chat_completion_user_message_param import ChatCompletionUserMessageParam
+from openai.types.chat.chat_completion_system_message_param import ChatCompletionSystemMessageParam
+
+def test_can_construct_assistant_message_with_function_tool_call() -> None:
+    params: ChatCompletionAssistantMessageParam = {
+        "role": "assistant",
+        "tool_calls": [
+            {
+                "id": "tool_call_123",
+                "type": "function",
+                "function": {
+                    "name": "get_weather",
+                    "arguments": '{"location": "Boston"}',
+                },
+            }
+        ],
+    }
+    assert params["role"] == "assistant"
+    assert params["tool_calls"] is not None
+    tool_call = params["tool_calls"][0]  # type: ignore
+    assert tool_call["id"] == "tool_call_123"
+    assert tool_call["type"] == "function"
+    # We need to assert that tool_call is FunctionToolCallParam for type checkers
+    assert "function" in tool_call
+    assert tool_call["function"]["name"] == "get_weather"  # type: ignore
+
+
+def test_can_construct_assistant_message_with_code_interpreter_log_output() -> None:
+    params: ChatCompletionAssistantMessageParam = {
+        "role": "assistant",
+        "tool_calls": [
+            {
+                "id": "tool_call_abc",
+                "type": "code_interpreter",
+                "code": "print('Hello World')",
+                "outputs": [
+                    {
+                        "type": "logs",
+                        "logs": "Hello World\n",
+                    }
+                ],
+            }
+        ],
+    }
"assistant" + assert params["tool_calls"] is not None + tool_call = params["tool_calls"][0] # type: ignore + assert tool_call["id"] == "tool_call_abc" + assert tool_call["type"] == "code_interpreter" + # We need to assert that tool_call is CodeInterpreterCallParam for type checkers + assert "code" in tool_call + assert tool_call["code"] == "print('Hello World')" # type: ignore + assert tool_call["outputs"] is not None # type: ignore + output = tool_call["outputs"][0] # type: ignore + assert output["type"] == "logs" + assert "logs" in output # For type checker + assert output["logs"] == "Hello World\n" # type: ignore + + +def test_can_construct_assistant_message_with_code_interpreter_image_output() -> None: + params: ChatCompletionAssistantMessageParam = { + "role": "assistant", + "tool_calls": [ + { + "id": "tool_call_xyz", + "type": "code_interpreter", + "code": "# generate image", + "outputs": [ + { + "type": "image", + "image": { + "file_id": "file_def456", + }, + } + ], + } + ], + } + assert params["role"] == "assistant" + assert params["tool_calls"] is not None + tool_call = params["tool_calls"][0] # type: ignore + assert tool_call["id"] == "tool_call_xyz" + assert tool_call["type"] == "code_interpreter" + assert "code" in tool_call # For type checker + assert tool_call["code"] == "# generate image" # type: ignore + assert tool_call["outputs"] is not None # type: ignore + output = tool_call["outputs"][0] # type: ignore + assert output["type"] == "image" + assert "image" in output # For type checker + assert output["image"]["file_id"] == "file_def456" # type: ignore + +# Example of constructing a list of messages for ChatCompletionCreateParams +def test_message_list_construction() -> None: + messages: list[ + Union[ + ChatCompletionUserMessageParam, + ChatCompletionSystemMessageParam, + ChatCompletionAssistantMessageParam, + # Add other message types if necessary, e.g., ToolMessageParam + ] + ] = [ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "Hello!"}, + { + "role": "assistant", + "tool_calls": [ + { + "id": "tool_call_xyz", + "type": "code_interpreter", + "code": "# generate image", + "outputs": [ + { + "type": "image", + "image": {"file_id": "file_def456"}, + } + ], + } + ], + }, + ] + assert len(messages) == 3 + assert messages[2]["role"] == "assistant" # type: ignore \ No newline at end of file