Skip to content

Don't send the "store" param unless it's hitting OpenAI #455

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Apr 7, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 9 additions & 5 deletions src/agents/models/openai_chatcompletions.py
Original file line number Diff line number Diff line change
Expand Up @@ -518,10 +518,8 @@ async def _fetch_response(
f"Response format: {response_format}\n"
)

# Match the behavior of Responses where store is True when not given
store = model_settings.store if model_settings.store is not None else True

reasoning_effort = model_settings.reasoning.effort if model_settings.reasoning else None
store = _Converter.get_store_param(self._get_client(), model_settings)

ret = await self._get_client().chat.completions.create(
model=self.model,
Expand All @@ -537,10 +535,10 @@ async def _fetch_response(
parallel_tool_calls=parallel_tool_calls,
stream=stream,
stream_options={"include_usage": True} if stream else NOT_GIVEN,
store=store,
store=self._non_null_or_not_given(store),
reasoning_effort=self._non_null_or_not_given(reasoning_effort),
extra_headers=_HEADERS,
metadata=model_settings.metadata,
metadata=self._non_null_or_not_given(model_settings.metadata),
)

if isinstance(ret, ChatCompletion):
Expand Down Expand Up @@ -570,6 +568,12 @@ def _get_client(self) -> AsyncOpenAI:


class _Converter:
@classmethod
def get_store_param(cls, client: AsyncOpenAI, model_settings: ModelSettings) -> bool | None:
    """Resolve the ``store`` value to send with a chat completions request.

    An explicitly configured ``model_settings.store`` always wins. When it is
    unset, default to ``True`` only for the official OpenAI API (matching the
    Responses API behavior, where ``store`` defaults to ``True``); for any
    other base URL return ``None`` so the parameter is omitted entirely.
    """
    if model_settings.store is not None:
        return model_settings.store
    targets_openai = str(client.base_url).startswith("https://api.openai.com")
    return True if targets_openai else None

@classmethod
def convert_tool_choice(
cls, tool_choice: Literal["auto", "required", "none"] | str | None
Expand Down
43 changes: 40 additions & 3 deletions tests/test_openai_chatcompletions.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import httpx
import pytest
from openai import NOT_GIVEN
from openai import NOT_GIVEN, AsyncOpenAI
from openai.types.chat.chat_completion import ChatCompletion, Choice
from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
from openai.types.chat.chat_completion_message import ChatCompletionMessage
Expand All @@ -31,6 +31,7 @@
generation_span,
)
from agents.models.fake_id import FAKE_RESPONSES_ID
from agents.models.openai_chatcompletions import _Converter


@pytest.mark.allow_call_model_methods
Expand Down Expand Up @@ -226,7 +227,7 @@ def __init__(self, completions: DummyCompletions) -> None:
# Ensure expected args were passed through to OpenAI client.
kwargs = completions.kwargs
assert kwargs["stream"] is False
assert kwargs["store"] is True
assert kwargs["store"] is NOT_GIVEN
assert kwargs["model"] == "gpt-4"
assert kwargs["messages"][0]["role"] == "system"
assert kwargs["messages"][0]["content"] == "sys"
Expand Down Expand Up @@ -280,7 +281,7 @@ def __init__(self, completions: DummyCompletions) -> None:
)
# Check OpenAI client was called for streaming
assert completions.kwargs["stream"] is True
assert completions.kwargs["store"] is True
assert completions.kwargs["store"] is NOT_GIVEN
assert completions.kwargs["stream_options"] == {"include_usage": True}
# Response is a proper openai Response
assert isinstance(response, Response)
Expand All @@ -290,3 +291,39 @@ def __init__(self, completions: DummyCompletions) -> None:
assert response.output == []
# We returned the async iterator produced by our dummy.
assert hasattr(stream, "__aiter__")


def test_store_param():
    """Should default to True for OpenAI API calls, and None otherwise.

    An explicitly set ``store`` value (True or False) must always be
    returned unchanged, regardless of which base URL the client targets.
    """

    # Default OpenAI base URL: store should default to True when unset.
    model_settings = ModelSettings()
    client = AsyncOpenAI()
    assert _Converter.get_store_param(client, model_settings) is True, (
        "Should default to True for OpenAI API calls"
    )

    model_settings = ModelSettings(store=False)
    assert _Converter.get_store_param(client, model_settings) is False, (
        "Should respect explicitly set store=False"
    )

    model_settings = ModelSettings(store=True)
    assert _Converter.get_store_param(client, model_settings) is True, (
        "Should respect explicitly set store=True"
    )

    # Non-OpenAI base URL: store should default to None (param omitted) when unset.
    client = AsyncOpenAI(base_url="http://www.notopenai.com")
    model_settings = ModelSettings()
    assert _Converter.get_store_param(client, model_settings) is None, (
        "Should default to None for non-OpenAI API calls"
    )

    model_settings = ModelSettings(store=False)
    assert _Converter.get_store_param(client, model_settings) is False, (
        "Should respect explicitly set store=False"
    )

    model_settings = ModelSettings(store=True)
    assert _Converter.get_store_param(client, model_settings) is True, (
        "Should respect explicitly set store=True"
    )
2 changes: 1 addition & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.