From bbdaa38a3560b21bd267a801bd3480b68f1d3ab0 Mon Sep 17 00:00:00 2001
From: Stainless Bot
Date: Fri, 29 Mar 2024 21:08:20 +0000
Subject: [PATCH] feat(api): adding temperature parameter

---
 .../beta/threads/messages/messages.py          | 22 +++++++---
 .../resources/beta/threads/runs/runs.py        | 42 +++++++++++++++++++
 src/openai/resources/beta/threads/threads.py   | 42 +++++++++++++++++++
 .../beta/thread_create_and_run_params.py       | 16 +++++--
 src/openai/types/beta/thread_create_params.py  |  9 ++--
 src/openai/types/beta/threads/message.py       |  6 +--
 .../beta/threads/message_create_params.py      |  9 ++--
 src/openai/types/beta/threads/run.py           |  3 ++
 .../types/beta/threads/run_create_params.py    |  7 ++++
 tests/api_resources/beta/test_threads.py       |  4 ++
 tests/api_resources/beta/threads/test_runs.py  |  4 ++
 11 files changed, 146 insertions(+), 18 deletions(-)

diff --git a/src/openai/resources/beta/threads/messages/messages.py b/src/openai/resources/beta/threads/messages/messages.py
index 21e8bca5b8..1c008a7cc4 100644
--- a/src/openai/resources/beta/threads/messages/messages.py
+++ b/src/openai/resources/beta/threads/messages/messages.py
@@ -52,7 +52,7 @@ def create(
         thread_id: str,
         *,
         content: str,
-        role: Literal["user"],
+        role: Literal["user", "assistant"],
         file_ids: List[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -68,8 +68,13 @@ def create(
         Args:
           content: The content of the message.

-          role: The role of the entity that is creating the message. Currently only `user` is
-              supported.
+          role:
+              The role of the entity that is creating the message. Allowed values include:
+
+              - `user`: Indicates the message is sent by an actual user and should be used in
+                most cases to represent user-generated messages.
+              - `assistant`: Indicates the message is generated by the assistant. Use this
+                value to insert messages from the assistant into the conversation.

           file_ids: A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
               the message should use. There can be a maximum of 10 files attached to a
@@ -276,7 +281,7 @@ async def create(
         thread_id: str,
         *,
         content: str,
-        role: Literal["user"],
+        role: Literal["user", "assistant"],
         file_ids: List[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -292,8 +297,13 @@ async def create(
         Args:
           content: The content of the message.

-          role: The role of the entity that is creating the message. Currently only `user` is
-              supported.
+          role:
+              The role of the entity that is creating the message. Allowed values include:
+
+              - `user`: Indicates the message is sent by an actual user and should be used in
+                most cases to represent user-generated messages.
+              - `assistant`: Indicates the message is generated by the assistant. Use this
+                value to insert messages from the assistant into the conversation.

           file_ids: A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
               the message should use. There can be a maximum of 10 files attached to a
diff --git a/src/openai/resources/beta/threads/runs/runs.py b/src/openai/resources/beta/threads/runs/runs.py
index afa447612c..ab39a96a8d 100644
--- a/src/openai/resources/beta/threads/runs/runs.py
+++ b/src/openai/resources/beta/threads/runs/runs.py
@@ -76,6 +76,7 @@ def create(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -114,6 +115,10 @@ def create(
               events, terminating when the Run enters a terminal state with a `data: [DONE]`
               message.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           tools: Override the tools the assistant can use for this run. This is useful for
               modifying the behavior on a per-run basis.

@@ -138,6 +143,7 @@ def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -176,6 +182,10 @@ def create(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           tools: Override the tools the assistant can use for this run. This is useful for
               modifying the behavior on a per-run basis.

@@ -200,6 +210,7 @@ def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -238,6 +249,10 @@ def create(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           tools: Override the tools the assistant can use for this run. This is useful for
               modifying the behavior on a per-run basis.
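Usage sketch (illustrative, not part of the patch): the new `temperature` argument on `client.beta.threads.runs.create` shown in the hunks above can be set per run. The thread and assistant IDs below are placeholders, and the client assumes an `OPENAI_API_KEY` environment variable.

    from openai import OpenAI

    client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

    # Override the sampling temperature for this run only (placeholder IDs).
    run = client.beta.threads.runs.create(
        thread_id="thread_abc123",
        assistant_id="asst_abc123",
        temperature=0.2,  # 0-2; lower values are more focused and deterministic
    )
    print(run.temperature)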
@@ -262,6 +277,7 @@ def create(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -283,6 +299,7 @@ def create(
                     "metadata": metadata,
                     "model": model,
                     "stream": stream,
+                    "temperature": temperature,
                     "tools": tools,
                 },
                 run_create_params.RunCreateParams,
@@ -489,6 +506,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         thread_id: str,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -510,6 +528,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         thread_id: str,
         event_handler: AssistantEventHandlerT,
@@ -531,6 +550,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         thread_id: str,
         event_handler: AssistantEventHandlerT | None = None,
@@ -561,6 +581,7 @@ def create_and_stream(
                     "instructions": instructions,
                     "metadata": metadata,
                     "model": model,
+                    "temperature": temperature,
                     "stream": True,
                     "tools": tools,
                 },
@@ -841,6 +862,7 @@ async def create(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -879,6 +901,10 @@ async def create(
               events, terminating when the Run enters a terminal state with a `data: [DONE]`
               message.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           tools: Override the tools the assistant can use for this run. This is useful for
               modifying the behavior on a per-run basis.

@@ -903,6 +929,7 @@ async def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -941,6 +968,10 @@ async def create(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           tools: Override the tools the assistant can use for this run. This is useful for
               modifying the behavior on a per-run basis.

@@ -965,6 +996,7 @@ async def create(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -1003,6 +1035,10 @@ async def create(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           tools: Override the tools the assistant can use for this run. This is useful for
               modifying the behavior on a per-run basis.

@@ -1027,6 +1063,7 @@ async def create(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
@@ -1048,6 +1085,7 @@ async def create(
                     "metadata": metadata,
                     "model": model,
                     "stream": stream,
+                    "temperature": temperature,
                     "tools": tools,
                 },
                 run_create_params.RunCreateParams,
@@ -1254,6 +1292,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         thread_id: str,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
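For completeness, a sketch of the same per-run temperature override through the async client touched in the hunks above; the IDs are placeholders and the API key is assumed to come from the environment.

    import asyncio

    from openai import AsyncOpenAI


    async def main() -> None:
        client = AsyncOpenAI()  # assumes OPENAI_API_KEY is set
        # Same override as the sync example, via the async resource (placeholder IDs).
        run = await client.beta.threads.runs.create(
            thread_id="thread_abc123",
            assistant_id="asst_abc123",
            temperature=1.5,  # higher values make the output more random
        )
        print(run.status)


    asyncio.run(main())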
@@ -1275,6 +1314,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         thread_id: str,
         event_handler: AsyncAssistantEventHandlerT,
@@ -1296,6 +1336,7 @@ def create_and_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[AssistantToolParam]] | NotGiven = NOT_GIVEN,
         thread_id: str,
         event_handler: AsyncAssistantEventHandlerT | None = None,
@@ -1328,6 +1369,7 @@ def create_and_stream(
                     "instructions": instructions,
                     "metadata": metadata,
                     "model": model,
+                    "temperature": temperature,
                     "stream": True,
                     "tools": tools,
                 },
diff --git a/src/openai/resources/beta/threads/threads.py b/src/openai/resources/beta/threads/threads.py
index bcb0da8a62..c2ad6aca5f 100644
--- a/src/openai/resources/beta/threads/threads.py
+++ b/src/openai/resources/beta/threads/threads.py
@@ -244,6 +244,7 @@ def create_and_run(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -278,6 +279,10 @@ def create_and_run(
               events, terminating when the Run enters a terminal state with a `data: [DONE]`
               message.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           thread: If no thread is provided, an empty thread will be created.

           tools: Override the tools the assistant can use for this run. This is useful for
@@ -302,6 +307,7 @@ def create_and_run(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -336,6 +342,10 @@ def create_and_run(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           thread: If no thread is provided, an empty thread will be created.

           tools: Override the tools the assistant can use for this run. This is useful for
@@ -360,6 +370,7 @@ def create_and_run(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -394,6 +405,10 @@ def create_and_run(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           thread: If no thread is provided, an empty thread will be created.

           tools: Override the tools the assistant can use for this run. This is useful for
@@ -418,6 +433,7 @@ def create_and_run(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -437,6 +453,7 @@ def create_and_run(
                     "metadata": metadata,
                     "model": model,
                     "stream": stream,
+                    "temperature": temperature,
                     "thread": thread,
                     "tools": tools,
                 },
@@ -458,6 +475,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
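Usage sketch (illustrative, not part of the patch): `create_and_run` accepts the same `temperature` override alongside an inline thread; the assistant ID and message text are placeholders, and the inline message shape follows the `ThreadMessage` params defined later in this patch.

    from openai import OpenAI

    client = OpenAI()  # assumes OPENAI_API_KEY is set

    # Create a thread and start a run in one call, with a per-run temperature.
    run = client.beta.threads.create_and_run(
        assistant_id="asst_abc123",
        temperature=0.8,
        thread={"messages": [{"role": "user", "content": "Summarize my notes."}]},
    )
    print(run.id, run.temperature)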
@@ -478,6 +496,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         event_handler: AssistantEventHandlerT,
@@ -498,6 +517,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         event_handler: AssistantEventHandlerT | None = None,
@@ -524,6 +544,7 @@ def create_and_run_stream(
                     "instructions": instructions,
                     "metadata": metadata,
                     "model": model,
+                    "temperature": temperature,
                     "stream": True,
                     "thread": thread,
                     "tools": tools,
@@ -723,6 +744,7 @@ async def create_and_run(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -757,6 +779,10 @@ async def create_and_run(
               events, terminating when the Run enters a terminal state with a `data: [DONE]`
               message.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           thread: If no thread is provided, an empty thread will be created.

           tools: Override the tools the assistant can use for this run. This is useful for
@@ -781,6 +807,7 @@ async def create_and_run(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -815,6 +842,10 @@ async def create_and_run(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           thread: If no thread is provided, an empty thread will be created.

           tools: Override the tools the assistant can use for this run. This is useful for
@@ -839,6 +870,7 @@ async def create_and_run(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -873,6 +905,10 @@ async def create_and_run(
               model associated with the assistant. If not, the model associated with the
               assistant will be used.

+          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+              make the output more random, while lower values like 0.2 will make it more
+              focused and deterministic.
+
           thread: If no thread is provided, an empty thread will be created.

           tools: Override the tools the assistant can use for this run. This is useful for
@@ -897,6 +933,7 @@ async def create_and_run(
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -916,6 +953,7 @@ async def create_and_run(
                     "metadata": metadata,
                     "model": model,
                     "stream": stream,
+                    "temperature": temperature,
                     "thread": thread,
                     "tools": tools,
                 },
@@ -937,6 +975,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -957,6 +996,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         event_handler: AsyncAssistantEventHandlerT,
@@ -977,6 +1017,7 @@ def create_and_run_stream(
         instructions: Optional[str] | NotGiven = NOT_GIVEN,
         metadata: Optional[object] | NotGiven = NOT_GIVEN,
         model: Optional[str] | NotGiven = NOT_GIVEN,
+        temperature: Optional[float] | NotGiven = NOT_GIVEN,
         thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
         tools: Optional[Iterable[thread_create_and_run_params.Tool]] | NotGiven = NOT_GIVEN,
         event_handler: AsyncAssistantEventHandlerT | None = None,
@@ -1005,6 +1046,7 @@ def create_and_run_stream(
                     "instructions": instructions,
                     "metadata": metadata,
                     "model": model,
+                    "temperature": temperature,
                     "stream": True,
                     "thread": thread,
                     "tools": tools,
diff --git a/src/openai/types/beta/thread_create_and_run_params.py b/src/openai/types/beta/thread_create_and_run_params.py
index 9c16e1133f..d4266fc48c 100644
--- a/src/openai/types/beta/thread_create_and_run_params.py
+++ b/src/openai/types/beta/thread_create_and_run_params.py
@@ -49,6 +49,13 @@ class ThreadCreateAndRunParamsBase(TypedDict, total=False):
     assistant will be used.
     """

+    temperature: Optional[float]
+    """What sampling temperature to use, between 0 and 2.
+
+    Higher values like 0.8 will make the output more random, while lower values like
+    0.2 will make it more focused and deterministic.
+    """
+
     thread: Thread
     """If no thread is provided, an empty thread will be created."""

@@ -63,10 +70,13 @@ class ThreadMessage(TypedDict, total=False):
     content: Required[str]
     """The content of the message."""

-    role: Required[Literal["user"]]
-    """The role of the entity that is creating the message.
+    role: Required[Literal["user", "assistant"]]
+    """The role of the entity that is creating the message. Allowed values include:

-    Currently only `user` is supported.
+    - `user`: Indicates the message is sent by an actual user and should be used in
+      most cases to represent user-generated messages.
+    - `assistant`: Indicates the message is generated by the assistant. Use this
+      value to insert messages from the assistant into the conversation.
     """

     file_ids: List[str]
diff --git a/src/openai/types/beta/thread_create_params.py b/src/openai/types/beta/thread_create_params.py
index b3dda503ff..1b382186aa 100644
--- a/src/openai/types/beta/thread_create_params.py
+++ b/src/openai/types/beta/thread_create_params.py
@@ -28,10 +28,13 @@ class Message(TypedDict, total=False):
     content: Required[str]
     """The content of the message."""

-    role: Required[Literal["user"]]
-    """The role of the entity that is creating the message.
+    role: Required[Literal["user", "assistant"]]
+    """The role of the entity that is creating the message. Allowed values include:

-    Currently only `user` is supported.
+    - `user`: Indicates the message is sent by an actual user and should be used in
+      most cases to represent user-generated messages.
+    - `assistant`: Indicates the message is generated by the assistant. Use this
+      value to insert messages from the assistant into the conversation.
""" file_ids: List[str] diff --git a/src/openai/types/beta/threads/message.py b/src/openai/types/beta/threads/message.py index 027e2bfa15..bde0263975 100644 --- a/src/openai/types/beta/threads/message.py +++ b/src/openai/types/beta/threads/message.py @@ -63,9 +63,9 @@ class Message(BaseModel): run_id: Optional[str] = None """ - If applicable, the ID of the - [run](https://platform.openai.com/docs/api-reference/runs) associated with the - authoring of this message. + The ID of the [run](https://platform.openai.com/docs/api-reference/runs) + associated with the creation of this message. Value is `null` when messages are + created manually using the create message or create thread endpoints. """ status: Literal["in_progress", "incomplete", "completed"] diff --git a/src/openai/types/beta/threads/message_create_params.py b/src/openai/types/beta/threads/message_create_params.py index b2f27deb3e..9b9467ef4d 100644 --- a/src/openai/types/beta/threads/message_create_params.py +++ b/src/openai/types/beta/threads/message_create_params.py @@ -12,10 +12,13 @@ class MessageCreateParams(TypedDict, total=False): content: Required[str] """The content of the message.""" - role: Required[Literal["user"]] - """The role of the entity that is creating the message. + role: Required[Literal["user", "assistant"]] + """The role of the entity that is creating the message. Allowed values include: - Currently only `user` is supported. + - `user`: Indicates the message is sent by an actual user and should be used in + most cases to represent user-generated messages. + - `assistant`: Indicates the message is generated by the assistant. Use this + value to insert messages from the assistant into the conversation. """ file_ids: List[str] diff --git a/src/openai/types/beta/threads/run.py b/src/openai/types/beta/threads/run.py index d2cac4c279..3ab276245f 100644 --- a/src/openai/types/beta/threads/run.py +++ b/src/openai/types/beta/threads/run.py @@ -139,3 +139,6 @@ class Run(BaseModel): This value will be `null` if the run is not in a terminal state (i.e. `in_progress`, `queued`, etc.). """ + + temperature: Optional[float] = None + """The sampling temperature used for this run. If not set, defaults to 1.""" diff --git a/src/openai/types/beta/threads/run_create_params.py b/src/openai/types/beta/threads/run_create_params.py index 89dff389a9..ac185973a5 100644 --- a/src/openai/types/beta/threads/run_create_params.py +++ b/src/openai/types/beta/threads/run_create_params.py @@ -48,6 +48,13 @@ class RunCreateParamsBase(TypedDict, total=False): assistant will be used. """ + temperature: Optional[float] + """What sampling temperature to use, between 0 and 2. + + Higher values like 0.8 will make the output more random, while lower values like + 0.2 will make it more focused and deterministic. + """ + tools: Optional[Iterable[AssistantToolParam]] """Override the tools the assistant can use for this run. 
diff --git a/tests/api_resources/beta/test_threads.py b/tests/api_resources/beta/test_threads.py
index 57dda57d16..fd3f7c5102 100644
--- a/tests/api_resources/beta/test_threads.py
+++ b/tests/api_resources/beta/test_threads.py
@@ -210,6 +210,7 @@ def test_method_create_and_run_with_all_params_overload_1(self, client: OpenAI)
             metadata={},
             model="string",
             stream=False,
+            temperature=1,
             thread={
                 "messages": [
                     {
@@ -277,6 +278,7 @@ def test_method_create_and_run_with_all_params_overload_2(self, client: OpenAI)
             instructions="string",
             metadata={},
             model="string",
+            temperature=1,
             thread={
                 "messages": [
                     {
@@ -522,6 +524,7 @@ async def test_method_create_and_run_with_all_params_overload_1(self, async_clie
             metadata={},
             model="string",
             stream=False,
+            temperature=1,
             thread={
                 "messages": [
                     {
@@ -589,6 +592,7 @@ async def test_method_create_and_run_with_all_params_overload_2(self, async_clie
             instructions="string",
             metadata={},
             model="string",
+            temperature=1,
             thread={
                 "messages": [
                     {
diff --git a/tests/api_resources/beta/threads/test_runs.py b/tests/api_resources/beta/threads/test_runs.py
index 3a9719b420..aabe2c7fc9 100644
--- a/tests/api_resources/beta/threads/test_runs.py
+++ b/tests/api_resources/beta/threads/test_runs.py
@@ -38,6 +38,7 @@ def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
             metadata={},
             model="string",
             stream=False,
+            temperature=1,
             tools=[{"type": "code_interpreter"}, {"type": "code_interpreter"}, {"type": "code_interpreter"}],
         )
         assert_matches_type(Run, run, path=["response"])
@@ -95,6 +96,7 @@ def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
             instructions="string",
             metadata={},
             model="string",
+            temperature=1,
             tools=[{"type": "code_interpreter"}, {"type": "code_interpreter"}, {"type": "code_interpreter"}],
         )
         run_stream.response.close()
@@ -492,6 +494,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
             metadata={},
             model="string",
             stream=False,
+            temperature=1,
             tools=[{"type": "code_interpreter"}, {"type": "code_interpreter"}, {"type": "code_interpreter"}],
         )
         assert_matches_type(Run, run, path=["response"])
@@ -549,6 +552,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
             instructions="string",
             metadata={},
             model="string",
+            temperature=1,
             tools=[{"type": "code_interpreter"}, {"type": "code_interpreter"}, {"type": "code_interpreter"}],
         )
         await run_stream.response.aclose()
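Closing usage sketch (illustrative, not part of the patch): the temperature override also flows through the streaming helpers touched above. This assumes the SDK's stream context manager exposes a `text_deltas` iterator in this version; the assistant ID and prompt are placeholders.

    from openai import OpenAI

    client = OpenAI()  # assumes OPENAI_API_KEY is set

    # Stream a run that was created with a temperature override (placeholder ID).
    with client.beta.threads.create_and_run_stream(
        assistant_id="asst_abc123",
        temperature=0.2,
        thread={"messages": [{"role": "user", "content": "Say hello."}]},
    ) as stream:
        for text in stream.text_deltas:
            print(text, end="", flush=True)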