diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index aaf968a..b56c3d0 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.1.0-alpha.3"
+  ".": "0.1.0-alpha.4"
 }
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 3ad3c39..1b104f7 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-5099b2b6ce467e4cae4520a778d3149d96ddf1331960860509028e7e2ac4b3f7.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-972b22a25d0d8bc0d4434735eb429ca11023968e7f14a1a39d48a1949386aa3b.yml
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7bf4794..2153696 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,25 @@
 # Changelog
 
+## 0.1.0-alpha.4 (2024-06-25)
+
+Full Changelog: [v0.1.0-alpha.3...v0.1.0-alpha.4](https://github.com/prompt-foundry/python-sdk/compare/v0.1.0-alpha.3...v0.1.0-alpha.4)
+
+### Features
+
+* **api:** OpenAPI spec update via Stainless API ([#56](https://github.com/prompt-foundry/python-sdk/issues/56)) ([0ed089b](https://github.com/prompt-foundry/python-sdk/commit/0ed089ba5a1b1ea3cf445ddb0e49bd5340b9bafd))
+* **api:** OpenAPI spec update via Stainless API ([#57](https://github.com/prompt-foundry/python-sdk/issues/57)) ([7180cd5](https://github.com/prompt-foundry/python-sdk/commit/7180cd5ea9335d1abc41329a26d0fe9a0cfd863f))
+* **api:** OpenAPI spec update via Stainless API ([#58](https://github.com/prompt-foundry/python-sdk/issues/58)) ([9f8a390](https://github.com/prompt-foundry/python-sdk/commit/9f8a3905eadb6876edc735b7af7f9d9f479e1ed9))
+* **api:** OpenAPI spec update via Stainless API ([#59](https://github.com/prompt-foundry/python-sdk/issues/59)) ([0eb8082](https://github.com/prompt-foundry/python-sdk/commit/0eb80823ac2357d035514caf0801cc39b415c36d))
+* **api:** OpenAPI spec update via Stainless API ([#60](https://github.com/prompt-foundry/python-sdk/issues/60)) ([16d556e](https://github.com/prompt-foundry/python-sdk/commit/16d556eb15620f760d9a26a789eca3180019289d))
+* **api:** OpenAPI spec update via Stainless API ([#61](https://github.com/prompt-foundry/python-sdk/issues/61)) ([054bfe4](https://github.com/prompt-foundry/python-sdk/commit/054bfe4b8ec078735f8a912264bf80ccaa94edab))
+* **api:** OpenAPI spec update via Stainless API ([#62](https://github.com/prompt-foundry/python-sdk/issues/62)) ([9e7efc7](https://github.com/prompt-foundry/python-sdk/commit/9e7efc7cdf6fdecfcdb3f7e5212010b2816e6abc))
+* **api:** OpenAPI spec update via Stainless API ([#63](https://github.com/prompt-foundry/python-sdk/issues/63)) ([8d222e1](https://github.com/prompt-foundry/python-sdk/commit/8d222e1223d4d5c9116588fbdd9119b3221b39b4))
+* **api:** OpenAPI spec update via Stainless API ([#64](https://github.com/prompt-foundry/python-sdk/issues/64)) ([c179b76](https://github.com/prompt-foundry/python-sdk/commit/c179b767eed6fe320eb8b3424154e8e41a768388))
+* **api:** OpenAPI spec update via Stainless API ([#67](https://github.com/prompt-foundry/python-sdk/issues/67)) ([7bfbc54](https://github.com/prompt-foundry/python-sdk/commit/7bfbc54632cec1db3dd990b4b10b930533902496))
+* **api:** OpenAPI spec update via Stainless API ([#68](https://github.com/prompt-foundry/python-sdk/issues/68)) ([e582b76](https://github.com/prompt-foundry/python-sdk/commit/e582b76d645268288d5f2aa5bcca143f8daa7cf3))
+* **api:** update via SDK Studio ([#65](https://github.com/prompt-foundry/python-sdk/issues/65)) ([b147b0a](https://github.com/prompt-foundry/python-sdk/commit/b147b0a4e626adf77413b33cecd5e180f1131e06))
+* **api:** update via SDK Studio ([#66](https://github.com/prompt-foundry/python-sdk/issues/66)) ([9214424](https://github.com/prompt-foundry/python-sdk/commit/92144245e6bcb91ed2b08859195150c40a389a4d))
+
 ## 0.1.0-alpha.3 (2024-06-24)
 
 Full Changelog: [v0.1.0-alpha.2...v0.1.0-alpha.3](https://github.com/prompt-foundry/python-sdk/compare/v0.1.0-alpha.2...v0.1.0-alpha.3)
diff --git a/pyproject.toml b/pyproject.toml
index bfb960c..4e76749 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "prompt_foundry_python_sdk"
-version = "0.1.0-alpha.3"
+version = "0.1.0-alpha.4"
 description = "The prompt engineering, prompt management, and prompt evaluation tool for Python "
 keywords = ["llm evaluation", "prompt management", "prompt testing", "prompt engineering"]
 dynamic = ["readme"]
diff --git a/src/prompt_foundry_python_sdk/_version.py b/src/prompt_foundry_python_sdk/_version.py
index 4cc4ccb..b2981f5 100644
--- a/src/prompt_foundry_python_sdk/_version.py
+++ b/src/prompt_foundry_python_sdk/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "prompt_foundry_python_sdk"
-__version__ = "0.1.0-alpha.3"  # x-release-please-version
+__version__ = "0.1.0-alpha.4"  # x-release-please-version
diff --git a/src/prompt_foundry_python_sdk/resources/evaluations.py b/src/prompt_foundry_python_sdk/resources/evaluations.py
index 18d5bf4..9c2f975 100644
--- a/src/prompt_foundry_python_sdk/resources/evaluations.py
+++ b/src/prompt_foundry_python_sdk/resources/evaluations.py
@@ -99,7 +99,7 @@ def update(
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
     ) -> Evaluation:
         """
-        Update a evaluation by ID.
+        Update an evaluation by ID.
 
         Args:
           appended_messages: The messages to append to the completion messages when running the evaluation.
@@ -163,7 +163,7 @@ def delete(
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
     ) -> EvaluationDeleteResponse:
         """
-        Delete a evaluation by ID.
+        Delete an evaluation by ID.
 
         Args:
           extra_headers: Send extra headers
@@ -196,7 +196,7 @@ def get(
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
     ) -> Evaluation:
         """
-        Retrieve a evaluation by ID
+        Retrieve an evaluation by ID
 
         Args:
           extra_headers: Send extra headers
@@ -287,7 +287,7 @@ async def update(
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
     ) -> Evaluation:
         """
-        Update a evaluation by ID.
+        Update an evaluation by ID.
 
         Args:
           appended_messages: The messages to append to the completion messages when running the evaluation.
@@ -351,7 +351,7 @@ async def delete(
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
     ) -> EvaluationDeleteResponse:
         """
-        Delete a evaluation by ID.
+        Delete an evaluation by ID.
 
         Args:
           extra_headers: Send extra headers
@@ -384,7 +384,7 @@ async def get(
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
     ) -> Evaluation:
         """
-        Retrieve a evaluation by ID
+        Retrieve an evaluation by ID
 
         Args:
           extra_headers: Send extra headers
diff --git a/src/prompt_foundry_python_sdk/types/prompt_configuration.py b/src/prompt_foundry_python_sdk/types/prompt_configuration.py
index d02bb08..fd69dde 100644
--- a/src/prompt_foundry_python_sdk/types/prompt_configuration.py
+++ b/src/prompt_foundry_python_sdk/types/prompt_configuration.py
@@ -54,6 +54,8 @@ class Parameters(BaseModel):
     api_model_name: str = FieldInfo(alias="modelName")
     """Example: "gpt-3.5-turbo" """
 
+    parallel_tool_calls: bool = FieldInfo(alias="parallelToolCalls")
+
     presence_penalty: float = FieldInfo(alias="presencePenalty")
     """Example: 0"""
 
diff --git a/src/prompt_foundry_python_sdk/types/prompt_create_params.py b/src/prompt_foundry_python_sdk/types/prompt_create_params.py
index 0317c3a..d494bca 100644
--- a/src/prompt_foundry_python_sdk/types/prompt_create_params.py
+++ b/src/prompt_foundry_python_sdk/types/prompt_create_params.py
@@ -64,6 +64,8 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
    """Example: "gpt-3.5-turbo" """
 
+    parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]
+
     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
     """Example: 0"""
 
diff --git a/src/prompt_foundry_python_sdk/types/prompt_update_params.py b/src/prompt_foundry_python_sdk/types/prompt_update_params.py
index 42e45e7..87326bf 100644
--- a/src/prompt_foundry_python_sdk/types/prompt_update_params.py
+++ b/src/prompt_foundry_python_sdk/types/prompt_update_params.py
@@ -64,6 +64,8 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
     """Example: "gpt-3.5-turbo" """
 
+    parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]
+
     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
     """Example: 0"""
 
diff --git a/tests/api_resources/test_prompts.py b/tests/api_resources/test_prompts.py
index d259be9..b60ffcc 100644
--- a/tests/api_resources/test_prompts.py
+++ b/tests/api_resources/test_prompts.py
@@ -132,6 +132,7 @@ def test_method_create(self, client: PromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -247,6 +248,7 @@ def test_raw_response_create(self, client: PromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -366,6 +368,7 @@ def test_streaming_response_create(self, client: PromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         ) as response:
@@ -488,6 +491,7 @@ def test_method_update(self, client: PromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -604,6 +608,7 @@ def test_raw_response_update(self, client: PromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -724,6 +729,7 @@ def test_streaming_response_update(self, client: PromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         ) as response:
@@ -847,6 +853,7 @@ def test_path_params_update(self, client: PromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -1303,6 +1310,7 @@ async def test_method_create(self, async_client: AsyncPromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -1418,6 +1426,7 @@ async def test_raw_response_create(self, async_client: AsyncPromptFoundry) -> No
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -1537,6 +1546,7 @@ async def test_streaming_response_create(self, async_client: AsyncPromptFoundry)
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         ) as response:
@@ -1659,6 +1669,7 @@ async def test_method_update(self, async_client: AsyncPromptFoundry) -> None:
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -1775,6 +1786,7 @@ async def test_raw_response_update(self, async_client: AsyncPromptFoundry) -> No
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )
@@ -1895,6 +1907,7 @@ async def test_streaming_response_update(self, async_client: AsyncPromptFoundry)
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         ) as response:
@@ -2018,6 +2031,7 @@ async def test_path_params_update(self, async_client: AsyncPromptFoundry) -> Non
                 "seed": 0,
                 "tool_choice": "string",
                 "stream": True,
+                "parallel_tool_calls": True,
             },
             tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
         )