Skip to content

release: 1.3.9 #963

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits on Dec 13, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "1.3.8"
".": "1.3.9"
}
16 changes: 16 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,21 @@
# Changelog

## 1.3.9 (2023-12-12)

Full Changelog: [v1.3.8...v1.3.9](https://github.com/openai/openai-python/compare/v1.3.8...v1.3.9)

### Documentation

* improve README timeout comment ([#964](https://github.com/openai/openai-python/issues/964)) ([3c3ed5e](https://github.com/openai/openai-python/commit/3c3ed5edd938a9333e8d2fa47cb4b44178eef89a))
* small improvement in the async chat response code ([#959](https://github.com/openai/openai-python/issues/959)) ([fb9d0a3](https://github.com/openai/openai-python/commit/fb9d0a358fa232043d9d5c149b6a888d50127c7b))
* small streaming readme improvements ([#962](https://github.com/openai/openai-python/issues/962)) ([f3be2e5](https://github.com/openai/openai-python/commit/f3be2e5cc24988471e6cedb3e34bdfd3123edc63))


### Refactors

* **client:** simplify cleanup ([#966](https://github.com/openai/openai-python/issues/966)) ([5c138f4](https://github.com/openai/openai-python/commit/5c138f4a7947e5b4aae8779fae78ca51269b355a))
* simplify internal error handling ([#968](https://github.com/openai/openai-python/issues/968)) ([d187f6b](https://github.com/openai/openai-python/commit/d187f6b6e4e646cca39c6ca35c618aa5c1bfbd61))

## 1.3.8 (2023-12-08)

Full Changelog: [v1.3.7...v1.3.8](https://github.com/openai/openai-python/compare/v1.3.7...v1.3.8)
Expand Down
25 changes: 14 additions & 11 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -97,8 +97,7 @@ stream = client.chat.completions.create(
stream=True,
)
for chunk in stream:
if chunk.choices[0].delta.content is not None:
print(chunk.choices[0].delta.content)
print(chunk.choices[0].delta.content or "", end="")
```

The async client uses the exact same interface.
Expand All @@ -108,14 +107,18 @@ from openai import AsyncOpenAI

client = AsyncOpenAI()

stream = await client.chat.completions.create(
model="gpt-4",
messages=[{"role": "user", "content": "Say this is a test"}],
stream=True,
)
async for chunk in stream:
if chunk.choices[0].delta.content is not None:
print(chunk.choices[0].delta.content)

import asyncio


async def main():
stream = await client.chat.completions.create(
model="gpt-4",
messages=[{"role": "user", "content": "Say this is a test"}],
stream=True,
)
async for chunk in stream:
print(chunk.choices[0].delta.content or "", end="")


asyncio.run(main())
```

## Module-level client
Expand Down Expand Up @@ -359,7 +362,7 @@ from openai import OpenAI

# Configure the default for all requests:
client = OpenAI(
# default is 60s
# 20 seconds (default is 10 minutes)
timeout=20.0,
)

Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "openai"
version = "1.3.8"
version = "1.3.9"
description = "The official Python library for the openai API"
readme = "README.md"
license = "Apache-2.0"
Expand Down Expand Up @@ -84,7 +84,7 @@ typecheck = { chain = [
]}
"typecheck:pyright" = "pyright"
"typecheck:verify-types" = "pyright --verifytypes openai --ignoreexternal"
"typecheck:mypy" = "mypy --enable-incomplete-feature=Unpack ."
"typecheck:mypy" = "mypy ."

[build-system]
requires = ["hatchling"]
Expand Down
7 changes: 0 additions & 7 deletions src/openai/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,13 +221,6 @@ def _client(self, value: _httpx.Client) -> None: # type: ignore

http_client = value

@override
def __del__(self) -> None:
try:
super().__del__()
except Exception:
pass


class _AzureModuleClient(_ModuleClient, AzureOpenAI): # type: ignore
...
Expand Down
128 changes: 66 additions & 62 deletions src/openai/_base_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import time
import uuid
import email
import asyncio
import inspect
import logging
import platform
Expand Down Expand Up @@ -672,9 +673,16 @@ def _idempotency_key(self) -> str:
return f"stainless-python-retry-{uuid.uuid4()}"


class SyncHttpxClientWrapper(httpx.Client):
    """httpx.Client subclass that closes itself on garbage collection.

    Used as the default client so that connection resources are released
    even when callers never explicitly call ``close()``.
    """

    def __del__(self) -> None:
        # Best-effort cleanup: ``__del__`` may run during interpreter
        # shutdown, where ``close()`` can fail for arbitrary reasons, so
        # swallow everything rather than emit noise at GC time.
        try:
            self.close()
        except Exception:
            pass


class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
_client: httpx.Client
_has_custom_http_client: bool
_default_stream_cls: type[Stream[Any]] | None = None

def __init__(
Expand Down Expand Up @@ -747,15 +755,14 @@ def __init__(
custom_headers=custom_headers,
_strict_response_validation=_strict_response_validation,
)
self._client = http_client or httpx.Client(
self._client = http_client or SyncHttpxClientWrapper(
base_url=base_url,
# cast to a valid type because mypy doesn't understand our type narrowing
timeout=cast(Timeout, timeout),
proxies=proxies,
transport=transport,
limits=limits,
)
self._has_custom_http_client = bool(http_client)

def is_closed(self) -> bool:
return self._client.is_closed
Expand Down Expand Up @@ -866,66 +873,61 @@ def _request(
request = self._build_request(options)
self._prepare_request(request)

response = None

try:
response = self._client.send(
request,
auth=self.custom_auth,
stream=stream or self._should_stream_response_body(request=request),
)
log.debug(
'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase
)
response.raise_for_status()
except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
if retries > 0 and self._should_retry(err.response):
err.response.close()
except httpx.TimeoutException as err:
if retries > 0:
return self._retry_request(
options,
cast_to,
retries,
err.response.headers,
stream=stream,
stream_cls=stream_cls,
response_headers=None,
)

# If the response is streamed then we need to explicitly read the response
# to completion before attempting to access the response text.
if not err.response.is_closed:
err.response.read()

raise self._make_status_error_from_response(err.response) from None
except httpx.TimeoutException as err:
if response is not None:
response.close()

raise APITimeoutError(request=request) from err
except Exception as err:
if retries > 0:
return self._retry_request(
options,
cast_to,
retries,
stream=stream,
stream_cls=stream_cls,
response_headers=response.headers if response is not None else None,
response_headers=None,
)

raise APITimeoutError(request=request) from err
except Exception as err:
if response is not None:
response.close()
raise APIConnectionError(request=request) from err

if retries > 0:
log.debug(
'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase
)

try:
response.raise_for_status()
except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
if retries > 0 and self._should_retry(err.response):
err.response.close()
return self._retry_request(
options,
cast_to,
retries,
err.response.headers,
stream=stream,
stream_cls=stream_cls,
response_headers=response.headers if response is not None else None,
)

raise APIConnectionError(request=request) from err
# If the response is streamed then we need to explicitly read the response
# to completion before attempting to access the response text.
if not err.response.is_closed:
err.response.read()

raise self._make_status_error_from_response(err.response) from None

return self._process_response(
cast_to=cast_to,
Expand Down Expand Up @@ -1135,9 +1137,17 @@ def get_api_list(
return self._request_api_list(model, page, opts)


class AsyncHttpxClientWrapper(httpx.AsyncClient):
    """httpx.AsyncClient subclass that schedules its own close on GC.

    ``aclose()`` must be awaited, so it cannot be called directly from
    ``__del__``; instead a task is scheduled on the running event loop.
    """

    def __del__(self) -> None:
        try:
            # TODO(someday): support non asyncio runtimes here
            # If no loop is running, get_running_loop() raises RuntimeError
            # and cleanup is silently skipped (best-effort only).
            asyncio.get_running_loop().create_task(self.aclose())
        except Exception:
            pass


class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
_client: httpx.AsyncClient
_has_custom_http_client: bool
_default_stream_cls: type[AsyncStream[Any]] | None = None

def __init__(
Expand Down Expand Up @@ -1210,15 +1220,14 @@ def __init__(
custom_headers=custom_headers,
_strict_response_validation=_strict_response_validation,
)
self._client = http_client or httpx.AsyncClient(
self._client = http_client or AsyncHttpxClientWrapper(
base_url=base_url,
# cast to a valid type because mypy doesn't understand our type narrowing
timeout=cast(Timeout, timeout),
proxies=proxies,
transport=transport,
limits=limits,
)
self._has_custom_http_client = bool(http_client)

def is_closed(self) -> bool:
return self._client.is_closed
Expand Down Expand Up @@ -1326,66 +1335,61 @@ async def _request(
request = self._build_request(options)
await self._prepare_request(request)

response = None

try:
response = await self._client.send(
request,
auth=self.custom_auth,
stream=stream or self._should_stream_response_body(request=request),
)
log.debug(
'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase
)
response.raise_for_status()
except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
if retries > 0 and self._should_retry(err.response):
await err.response.aclose()
except httpx.TimeoutException as err:
if retries > 0:
return await self._retry_request(
options,
cast_to,
retries,
err.response.headers,
stream=stream,
stream_cls=stream_cls,
response_headers=None,
)

# If the response is streamed then we need to explicitly read the response
# to completion before attempting to access the response text.
if not err.response.is_closed:
await err.response.aread()

raise self._make_status_error_from_response(err.response) from None
except httpx.TimeoutException as err:
if response is not None:
await response.aclose()

raise APITimeoutError(request=request) from err
except Exception as err:
if retries > 0:
return await self._retry_request(
options,
cast_to,
retries,
stream=stream,
stream_cls=stream_cls,
response_headers=response.headers if response is not None else None,
response_headers=None,
)

raise APITimeoutError(request=request) from err
except Exception as err:
if response is not None:
await response.aclose()
raise APIConnectionError(request=request) from err

if retries > 0:
log.debug(
'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase
)

try:
response.raise_for_status()
except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
if retries > 0 and self._should_retry(err.response):
await err.response.aclose()
return await self._retry_request(
options,
cast_to,
retries,
err.response.headers,
stream=stream,
stream_cls=stream_cls,
response_headers=response.headers if response is not None else None,
)

raise APIConnectionError(request=request) from err
# If the response is streamed then we need to explicitly read the response
# to completion before attempting to access the response text.
if not err.response.is_closed:
await err.response.aread()

raise self._make_status_error_from_response(err.response) from None

return self._process_response(
cast_to=cast_to,
Expand Down
Loading