Skip to content

Commit 2bcc864

Browse files
authored
Don't send the "store" param unless it's hitting OpenAI (#455)
Summary: See #443. Causes issues with Gemini. Test Plan: Tests. Also tested with Gemini to ensure it works.
1 parent 50bbfdd commit 2bcc864

File tree

3 files changed

+50
-9
lines changed

3 files changed

+50
-9
lines changed

src/agents/models/openai_chatcompletions.py

+9-5
Original file line numberDiff line numberDiff line change
@@ -518,10 +518,8 @@ async def _fetch_response(
518518
f"Response format: {response_format}\n"
519519
)
520520

521-
# Match the behavior of Responses where store is True when not given
522-
store = model_settings.store if model_settings.store is not None else True
523-
524521
reasoning_effort = model_settings.reasoning.effort if model_settings.reasoning else None
522+
store = _Converter.get_store_param(self._get_client(), model_settings)
525523

526524
ret = await self._get_client().chat.completions.create(
527525
model=self.model,
@@ -537,10 +535,10 @@ async def _fetch_response(
537535
parallel_tool_calls=parallel_tool_calls,
538536
stream=stream,
539537
stream_options={"include_usage": True} if stream else NOT_GIVEN,
540-
store=store,
538+
store=self._non_null_or_not_given(store),
541539
reasoning_effort=self._non_null_or_not_given(reasoning_effort),
542540
extra_headers=_HEADERS,
543-
metadata=model_settings.metadata,
541+
metadata=self._non_null_or_not_given(model_settings.metadata),
544542
)
545543

546544
if isinstance(ret, ChatCompletion):
@@ -570,6 +568,12 @@ def _get_client(self) -> AsyncOpenAI:
570568

571569

572570
class _Converter:
571+
@classmethod
572+
def get_store_param(cls, client: AsyncOpenAI, model_settings: ModelSettings) -> bool | None:
573+
# Match the behavior of Responses where store is True when not given
574+
default_store = True if str(client.base_url).startswith("https://api.openai.com") else None
575+
return model_settings.store if model_settings.store is not None else default_store
576+
573577
@classmethod
574578
def convert_tool_choice(
575579
cls, tool_choice: Literal["auto", "required", "none"] | str | None

tests/test_openai_chatcompletions.py

+40-3
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
import httpx
77
import pytest
8-
from openai import NOT_GIVEN
8+
from openai import NOT_GIVEN, AsyncOpenAI
99
from openai.types.chat.chat_completion import ChatCompletion, Choice
1010
from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
1111
from openai.types.chat.chat_completion_message import ChatCompletionMessage
@@ -31,6 +31,7 @@
3131
generation_span,
3232
)
3333
from agents.models.fake_id import FAKE_RESPONSES_ID
34+
from agents.models.openai_chatcompletions import _Converter
3435

3536

3637
@pytest.mark.allow_call_model_methods
@@ -226,7 +227,7 @@ def __init__(self, completions: DummyCompletions) -> None:
226227
# Ensure expected args were passed through to OpenAI client.
227228
kwargs = completions.kwargs
228229
assert kwargs["stream"] is False
229-
assert kwargs["store"] is True
230+
assert kwargs["store"] is NOT_GIVEN
230231
assert kwargs["model"] == "gpt-4"
231232
assert kwargs["messages"][0]["role"] == "system"
232233
assert kwargs["messages"][0]["content"] == "sys"
@@ -280,7 +281,7 @@ def __init__(self, completions: DummyCompletions) -> None:
280281
)
281282
# Check OpenAI client was called for streaming
282283
assert completions.kwargs["stream"] is True
283-
assert completions.kwargs["store"] is True
284+
assert completions.kwargs["store"] is NOT_GIVEN
284285
assert completions.kwargs["stream_options"] == {"include_usage": True}
285286
# Response is a proper openai Response
286287
assert isinstance(response, Response)
@@ -290,3 +291,39 @@ def __init__(self, completions: DummyCompletions) -> None:
290291
assert response.output == []
291292
# We returned the async iterator produced by our dummy.
292293
assert hasattr(stream, "__aiter__")
294+
295+
296+
def test_store_param():
297+
"""Should default to True for OpenAI API calls, and False otherwise."""
298+
299+
model_settings = ModelSettings()
300+
client = AsyncOpenAI()
301+
assert _Converter.get_store_param(client, model_settings) is True, (
302+
"Should default to True for OpenAI API calls"
303+
)
304+
305+
model_settings = ModelSettings(store=False)
306+
assert _Converter.get_store_param(client, model_settings) is False, (
307+
"Should respect explicitly set store=False"
308+
)
309+
310+
model_settings = ModelSettings(store=True)
311+
assert _Converter.get_store_param(client, model_settings) is True, (
312+
"Should respect explicitly set store=True"
313+
)
314+
315+
client = AsyncOpenAI(base_url="http://www.notopenai.com")
316+
model_settings = ModelSettings()
317+
assert _Converter.get_store_param(client, model_settings) is None, (
318+
"Should default to None for non-OpenAI API calls"
319+
)
320+
321+
model_settings = ModelSettings(store=False)
322+
assert _Converter.get_store_param(client, model_settings) is False, (
323+
"Should respect explicitly set store=False"
324+
)
325+
326+
model_settings = ModelSettings(store=True)
327+
assert _Converter.get_store_param(client, model_settings) is True, (
328+
"Should respect explicitly set store=True"
329+
)

uv.lock

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)