From b8391495f85e17f89c3759fee8422e01a27d8256 Mon Sep 17 00:00:00 2001
From: Drew Youngwerth
Date: Thu, 3 Apr 2025 13:17:02 -0700
Subject: [PATCH] Add metadata to ModelSettings

---
 src/agents/model_settings.py                | 3 +++
 src/agents/models/openai_chatcompletions.py | 1 +
 src/agents/models/openai_responses.py       | 1 +
 3 files changed, 5 insertions(+)

diff --git a/src/agents/model_settings.py b/src/agents/model_settings.py
index 2b0885ab..bef7b9a6 100644
--- a/src/agents/model_settings.py
+++ b/src/agents/model_settings.py
@@ -40,6 +40,9 @@ class ModelSettings:
     max_tokens: int | None = None
     """The maximum number of output tokens to generate."""
 
+    metadata: dict[str, str] | None = None
+    """Metadata to include with the model response call."""
+
     store: bool | None = None
     """Whether to store the generated model response for later retrieval.
     Defaults to True if not provided."""
diff --git a/src/agents/models/openai_chatcompletions.py b/src/agents/models/openai_chatcompletions.py
index 2ba18872..f90140bf 100644
--- a/src/agents/models/openai_chatcompletions.py
+++ b/src/agents/models/openai_chatcompletions.py
@@ -537,6 +537,7 @@ async def _fetch_response(
             stream_options={"include_usage": True} if stream else NOT_GIVEN,
             store=store,
             extra_headers=_HEADERS,
+            metadata=model_settings.metadata,
         )
 
         if isinstance(ret, ChatCompletion):
diff --git a/src/agents/models/openai_responses.py b/src/agents/models/openai_responses.py
index 17803fda..0a153910 100644
--- a/src/agents/models/openai_responses.py
+++ b/src/agents/models/openai_responses.py
@@ -247,6 +247,7 @@ async def _fetch_response(
             extra_headers=_HEADERS,
             text=response_format,
             store=self._non_null_or_not_given(model_settings.store),
+            metadata=model_settings.metadata,
         )
 
     def _get_client(self) -> AsyncOpenAI:
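
With this patch, a caller can attach request metadata by setting the new `metadata` field on `ModelSettings`; both the Chat Completions and Responses code paths forward it to the underlying OpenAI client call. Below is a minimal usage sketch, not part of the diff: it assumes the SDK's standard `Agent`/`Runner` pattern, and the metadata keys and values shown are hypothetical placeholders.

```python
from agents import Agent, ModelSettings, Runner

# Sketch only: the metadata keys/values here are hypothetical examples,
# e.g. for tracing or billing attribution on the provider side.
agent = Agent(
    name="Support agent",
    instructions="Answer customer questions concisely.",
    model_settings=ModelSettings(
        metadata={"tenant_id": "acme-corp", "feature": "support-chat"},
    ),
)

# The metadata dict is passed through to the model response call
# (chat.completions.create or responses.create) by the patched code.
result = Runner.run_sync(agent, "How do I reset my password?")
print(result.final_output)
```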