diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 19cc6edce7..d55a714ec5 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.14.2" + ".": "1.14.3" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 7497d6af56..913dece99e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## 1.14.3 (2024-03-25) + +Full Changelog: [v1.14.2...v1.14.3](https://github.com/openai/openai-python/compare/v1.14.2...v1.14.3) + +### Bug Fixes + +* revert regression with 3.7 support ([#1269](https://github.com/openai/openai-python/issues/1269)) ([37aed56](https://github.com/openai/openai-python/commit/37aed564143dc7281f1eaa6ab64ec5ca334cf25e)) + + +### Chores + +* **internal:** construct error properties instead of using the raw response ([#1257](https://github.com/openai/openai-python/issues/1257)) ([11dce5c](https://github.com/openai/openai-python/commit/11dce5c66395722b245f5d5461ce379ca7b939e4)) +* **internal:** formatting change ([#1258](https://github.com/openai/openai-python/issues/1258)) ([b907dd7](https://github.com/openai/openai-python/commit/b907dd7dcae895e4209559da061d0991a8d640a6)) +* **internal:** loosen input type for util function ([#1250](https://github.com/openai/openai-python/issues/1250)) ([fc8b4c3](https://github.com/openai/openai-python/commit/fc8b4c37dc91dfcc0535c19236092992171784a0)) + + +### Documentation + +* **contributing:** fix typo ([#1264](https://github.com/openai/openai-python/issues/1264)) ([835cb9b](https://github.com/openai/openai-python/commit/835cb9b2f92e2aa3329545b4677865dcd4fd00f0)) +* **readme:** consistent use of sentence case in headings ([#1255](https://github.com/openai/openai-python/issues/1255)) ([519f371](https://github.com/openai/openai-python/commit/519f371af779b5fa353292ff5a2d3332afe0987e)) +* **readme:** document how to make undocumented requests 
([#1256](https://github.com/openai/openai-python/issues/1256)) ([5887858](https://github.com/openai/openai-python/commit/5887858a7b649dfde5b733ef01e5cffcf953b2a7)) + ## 1.14.2 (2024-03-19) Full Changelog: [v1.14.1...v1.14.2](https://github.com/openai/openai-python/compare/v1.14.1...v1.14.2) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7473159258..354d21b2d2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -121,5 +121,5 @@ You can release to package managers by using [the `Publish PyPI` GitHub action]( ### Publish manually -If you need to manually release a package, you can run the `bin/publish-pypi` script with an `PYPI_TOKEN` set on +If you need to manually release a package, you can run the `bin/publish-pypi` script with a `PYPI_TOKEN` set on the environment. diff --git a/README.md b/README.md index befe927cea..6f446d82e1 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ asyncio.run(main()) Functionality between the synchronous and asynchronous clients is otherwise identical. -## Streaming Responses +## Streaming responses We provide support for streaming responses using Server Side Events (SSE). @@ -281,7 +281,7 @@ completion = client.chat.completions.create( ) ``` -## File Uploads +## File uploads Request parameters that correspond to file uploads can be passed as `bytes`, a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance or a tuple of `(filename, contents, media type)`. @@ -487,6 +487,41 @@ with client.chat.completions.with_streaming_response.create( The context manager is required so that the response will reliably be closed. +### Making custom/undocumented requests + +This library is typed for convenient access to the documented API. + +If you need to access undocumented endpoints, params, or response properties, the library can still be used. 
+ +#### Undocumented endpoints + +To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other +http verbs. Options on the client (such as retries) will be respected when making this +request. + +```py +import httpx + +response = client.post( + "/foo", + cast_to=httpx.Response, + body={"my_param": True}, +) + +print(response.headers.get("x-foo")) +``` + +#### Undocumented params + +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request +options. + +#### Undocumented properties + +To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You +can also get all the extra fields on the Pydantic model as a dict with +[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). + ### Configuring the HTTP client You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: diff --git a/pyproject.toml b/pyproject.toml index de412f3907..8e8ce06881 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "openai" -version = "1.14.2" +version = "1.14.3" description = "The official Python library for the openai API" readme = "README.md" license = "Apache-2.0" @@ -135,6 +135,7 @@ reportImplicitOverride = true reportImportCycles = false reportPrivateUsage = false + [tool.ruff] line-length = 120 output-format = "grouped" diff --git a/src/openai/_exceptions.py b/src/openai/_exceptions.py index 350fd2584b..074752c8a1 100644 --- a/src/openai/_exceptions.py +++ b/src/openai/_exceptions.py @@ -8,6 +8,7 @@ import httpx from ._utils import is_dict +from ._models import construct_type __all__ = [ "BadRequestError", @@ -51,9 +52,9 @@ def __init__(self, message: str, request: httpx.Request, *, body: object | None) self.body = body if is_dict(body): - self.code = 
cast(Any, body.get("code")) - self.param = cast(Any, body.get("param")) - self.type = cast(Any, body.get("type")) + self.code = cast(Any, construct_type(type_=Optional[str], value=body.get("code"))) + self.param = cast(Any, construct_type(type_=Optional[str], value=body.get("param"))) + self.type = cast(Any, construct_type(type_=str, value=body.get("type"))) else: self.code = None self.param = None diff --git a/src/openai/_models.py b/src/openai/_models.py index 166973538f..77c755b135 100644 --- a/src/openai/_models.py +++ b/src/openai/_models.py @@ -290,11 +290,15 @@ def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericMo return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) -def construct_type(*, value: object, type_: type) -> object: +def construct_type(*, value: object, type_: object) -> object: """Loose coercion to the expected type with construction of nested values. If the given value does not match the expected type then it is returned as-is. 
""" + # we allow `object` as the input type because otherwise, passing things like + # `Literal['value']` will be reported as a type error by type checkers + type_ = cast("type[object]", type_) + # unwrap `Annotated[T, ...]` -> `T` if is_annotated_type(type_): meta = get_args(type_)[1:] @@ -534,12 +538,14 @@ class GenericModel(BaseGenericModel, BaseModel): if PYDANTIC_V2: + from pydantic import TypeAdapter as _TypeAdapter + + _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) + if TYPE_CHECKING: from pydantic import TypeAdapter else: - from pydantic import TypeAdapter as _TypeAdapter - - TypeAdapter = lru_cache(_TypeAdapter) + TypeAdapter = _CachedTypeAdapter def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: return TypeAdapter(type_).validate_python(value) diff --git a/src/openai/_version.py b/src/openai/_version.py index b8eb743acc..9163853b72 100644 --- a/src/openai/_version.py +++ b/src/openai/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "openai" -__version__ = "1.14.2" # x-release-please-version +__version__ = "1.14.3" # x-release-please-version