Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions src/openai/_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,31 @@ def to_json(
warnings=warnings,
)

def as_input(
    self,
    *,
    warnings: bool = True,
) -> dict[str, object]:
    """Serializes this model for reuse as an API input item.

    This drops fields that were never set, filters `None` values, and respects
    any model-level `__api_exclude__` hints used by request serialization.
    """
    # JSON-mode dump with aliases so the payload matches the wire format;
    # unset and None fields are omitted up front.
    serialized = self.model_dump(
        mode="json",
        by_alias=True,
        exclude_unset=True,
        exclude_none=True,
        warnings=warnings,
    )

    # Strip any keys the model marks as output-only for request serialization.
    # `or ()` covers both a missing attribute and an explicitly empty set.
    for key in getattr(self, "__api_exclude__", None) or ():
        serialized.pop(key, None)

    return cast("dict[str, object]", serialized)

@override
def __str__(self) -> str:
# mypy complains about an invalid self arg
Expand Down
2 changes: 2 additions & 0 deletions src/openai/types/responses/response_output_message.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
class ResponseOutputMessage(BaseModel):
"""An output message from the model."""

__api_exclude__ = {"status"}
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Badge Preserve required status on reused assistant messages

ResponseOutputMessageParam still declares status as required for responses.create inputs (response_output_message_param.py), but adding __api_exclude__ = {"status"} here makes both maybe_transform(...) (via _transform_recursive on BaseModel) and as_input() strip that field unconditionally. In flows that reuse ResponseOutputMessage objects as next-turn input, this now emits payloads that no longer satisfy the generated request schema and can be rejected by strict server-side validation.

Useful? React with 👍 / 👎.


id: str
"""The unique ID of the output message."""

Expand Down
2 changes: 2 additions & 0 deletions src/openai/types/responses/response_reasoning_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ class ResponseReasoningItem(BaseModel):
[managing context](https://platform.openai.com/docs/guides/conversation-state).
"""

__api_exclude__ = {"status"}

id: str
"""The unique identifier of the reasoning content."""

Expand Down
66 changes: 65 additions & 1 deletion tests/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@
import pydantic
from pydantic import Field

from openai._utils import PropertyInfo
from openai._utils import PropertyInfo, maybe_transform
from openai._compat import PYDANTIC_V1, parse_obj, model_dump, model_json
from openai._models import DISCRIMINATOR_CACHE, BaseModel, construct_type
from openai.types.responses import ResponseOutputMessage, ResponseReasoningItem
from openai.types.responses.response_create_params import ResponseCreateParamsNonStreaming


class BasicModel(BaseModel):
Expand Down Expand Up @@ -572,6 +574,68 @@ class Model(BaseModel):
assert isinstance(model_dump(m, warnings=False), dict)


def test_as_input_response_items() -> None:
    """`as_input()` omits None-valued fields and `__api_exclude__` keys on response items."""
    summary_part = {"text": "Reasoning summary", "type": "summary_text"}
    text_part = {"type": "output_text", "annotations": [], "text": "Paris"}

    reasoning = ResponseReasoningItem.construct(
        id="rs_123",
        type="reasoning",
        summary=[summary_part],
        status=None,
        encrypted_content=None,
    )
    message = ResponseOutputMessage.construct(
        id="msg_123",
        type="message",
        role="assistant",
        status="completed",
        phase="final_answer",
        content=[text_part],
    )

    # None-valued fields (`status`, `encrypted_content`) are dropped entirely.
    assert reasoning.as_input() == {
        "id": "rs_123",
        "type": "reasoning",
        "summary": [summary_part],
    }
    # `status` was set, but is stripped via the model's `__api_exclude__` hint.
    assert message.as_input() == {
        "id": "msg_123",
        "type": "message",
        "role": "assistant",
        "phase": "final_answer",
        "content": [text_part],
    }


def test_request_transform_respects_api_exclude_when_reusing_response_items() -> None:
    """`maybe_transform()` strips `__api_exclude__` fields when response items are reused as input."""
    items = [
        ResponseReasoningItem.construct(
            id="rs_123",
            type="reasoning",
            summary=[{"text": "Reasoning summary", "type": "summary_text"}],
            status="completed",
        ),
        ResponseOutputMessage.construct(
            id="msg_123",
            type="message",
            role="assistant",
            status="completed",
            content=[{"type": "output_text", "annotations": [], "text": "Paris"}],
        ),
    ]

    params = {
        "input": items,
        "model": "o4-mini",
        "stream": False,
    }
    transformed = maybe_transform(params, ResponseCreateParamsNonStreaming)

    # Request transformation should serialize each item the same way `as_input()` does,
    # i.e. with the excluded `status` field removed.
    expected = {
        "input": [item.as_input() for item in items],
        "model": "o4-mini",
        "stream": False,
    }
    assert transformed == expected


def test_to_json() -> None:
class Model(BaseModel):
foo: Optional[str] = Field(alias="FOO", default=None)
Expand Down