From 5808bec82517a6c908acd5cdb07524589bf9335b Mon Sep 17 00:00:00 2001
From: Anton Pirker
Date: Thu, 10 Jul 2025 11:10:06 +0200
Subject: [PATCH] Fix custom model name

---
 .../openai_agents/spans/ai_client.py          |  3 +-
 .../integrations/openai_agents/utils.py       |  3 +-
 .../openai_agents/test_openai_agents.py       | 54 +++++++++++++++++++
 3 files changed, 58 insertions(+), 2 deletions(-)

diff --git a/sentry_sdk/integrations/openai_agents/spans/ai_client.py b/sentry_sdk/integrations/openai_agents/spans/ai_client.py
index 30c5fd1dac..d325ae86e3 100644
--- a/sentry_sdk/integrations/openai_agents/spans/ai_client.py
+++ b/sentry_sdk/integrations/openai_agents/spans/ai_client.py
@@ -19,9 +19,10 @@
 def ai_client_span(agent, get_response_kwargs):
     # type: (Agent, dict[str, Any]) -> sentry_sdk.tracing.Span
     # TODO-anton: implement other types of operations. Now "chat" is hardcoded.
+    model_name = agent.model.model if hasattr(agent.model, "model") else agent.model
     span = sentry_sdk.start_span(
         op=OP.GEN_AI_CHAT,
-        description=f"chat {agent.model}",
+        description=f"chat {model_name}",
         origin=SPAN_ORIGIN,
     )
     # TODO-anton: remove hardcoded stuff and replace something that also works for embedding and so on
diff --git a/sentry_sdk/integrations/openai_agents/utils.py b/sentry_sdk/integrations/openai_agents/utils.py
index 28dbd6bb75..dc66521c83 100644
--- a/sentry_sdk/integrations/openai_agents/utils.py
+++ b/sentry_sdk/integrations/openai_agents/utils.py
@@ -53,7 +53,8 @@ def _set_agent_data(span, agent):
         )
 
     if agent.model:
-        span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, agent.model)
+        model_name = agent.model.model if hasattr(agent.model, "model") else agent.model
+        span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
 
     if agent.model_settings.presence_penalty:
         span.set_data(
diff --git a/tests/integrations/openai_agents/test_openai_agents.py b/tests/integrations/openai_agents/test_openai_agents.py
index ec606c8806..37a066aeca 100644
--- a/tests/integrations/openai_agents/test_openai_agents.py
+++ b/tests/integrations/openai_agents/test_openai_agents.py
@@ -74,6 +74,24 @@ def test_agent():
     )
 
 
+@pytest.fixture
+def test_agent_custom_model():
+    """Create a real Agent instance for testing."""
+    return Agent(
+        name="test_agent_custom_model",
+        instructions="You are a helpful test assistant.",
+        # the model could be agents.OpenAIChatCompletionsModel()
+        model=MagicMock(model="my-custom-model"),
+        model_settings=ModelSettings(
+            max_tokens=100,
+            temperature=0.7,
+            top_p=1.0,
+            presence_penalty=0.0,
+            frequency_penalty=0.0,
+        ),
+    )
+
+
 @pytest.mark.asyncio
 async def test_agent_invocation_span(
     sentry_init, capture_events, test_agent, mock_model_response
@@ -128,6 +146,42 @@
     assert ai_client_span["data"]["gen_ai.request.top_p"] == 1.0
 
 
+@pytest.mark.asyncio
+async def test_client_span_custom_model(
+    sentry_init, capture_events, test_agent_custom_model, mock_model_response
+):
+    """
+    Test that the integration uses the correct model name if a custom model is used.
+    """
+
+    with patch.dict(os.environ, {"OPENAI_API_KEY": "test-key"}):
+        with patch(
+            "agents.models.openai_responses.OpenAIResponsesModel.get_response"
+        ) as mock_get_response:
+            mock_get_response.return_value = mock_model_response
+
+            sentry_init(
+                integrations=[OpenAIAgentsIntegration()],
+                traces_sample_rate=1.0,
+            )
+
+            events = capture_events()
+
+            result = await agents.Runner.run(
+                test_agent_custom_model, "Test input", run_config=test_run_config
+            )
+
+            assert result is not None
+            assert result.final_output == "Hello, how can I help you?"
+
+    (transaction,) = events
+    spans = transaction["spans"]
+    _, ai_client_span = spans
+
+    assert ai_client_span["description"] == "chat my-custom-model"
+    assert ai_client_span["data"]["gen_ai.request.model"] == "my-custom-model"
+
+
 def test_agent_invocation_span_sync(
     sentry_init, capture_events, test_agent, mock_model_response
 ):