
Commit d1691a1

docs(examples): use named params more (#1543)
1 parent: d9b5e48

16 files changed (+808, -808 lines)
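
The pattern is the same across all 16 files: the generated examples stop passing the bare placeholder "string" and instead use a value named after its parameter (input="input", prompt="prompt", and so on), and path parameters such as assistant_id move from positional arguments to keyword arguments. Below is a minimal before/after sketch of the call shape using the assistants update method from the diff; the client construction and dummy key are illustrative, not part of this commit, and the placeholder values would not resolve against the live API.

from openai import OpenAI

# Illustrative client; with a dummy key and placeholder IDs these calls would
# fail against the real API -- the point is only the shape of the call.
client = OpenAI(api_key="sk-dummy")

# Before this commit: positional path parameter, generic "string" placeholders.
client.beta.assistants.update(
    "string",                 # which parameter is this? unclear at the call site
    description="string",
)

# After this commit: named path parameter, self-describing placeholder values.
client.beta.assistants.update(
    assistant_id="assistant_id",
    description="description",
)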

tests/api_resources/audio/test_speech.py (+8, -8)

@@ -26,7 +26,7 @@ class TestSpeech:
     def test_method_create(self, client: OpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
         speech = client.audio.speech.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
         )
@@ -38,7 +38,7 @@ def test_method_create(self, client: OpenAI, respx_mock: MockRouter) -> None:
     def test_method_create_with_all_params(self, client: OpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
         speech = client.audio.speech.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
             response_format="mp3",
@@ -53,7 +53,7 @@ def test_raw_response_create(self, client: OpenAI, respx_mock: MockRouter) -> No
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))

         response = client.audio.speech.with_raw_response.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
         )
@@ -68,7 +68,7 @@ def test_raw_response_create(self, client: OpenAI, respx_mock: MockRouter) -> No
     def test_streaming_response_create(self, client: OpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
         with client.audio.speech.with_streaming_response.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
         ) as response:
@@ -89,7 +89,7 @@ class TestAsyncSpeech:
     async def test_method_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
         speech = await async_client.audio.speech.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
         )
@@ -101,7 +101,7 @@ async def test_method_create(self, async_client: AsyncOpenAI, respx_mock: MockRo
     async def test_method_create_with_all_params(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
         speech = await async_client.audio.speech.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
             response_format="mp3",
@@ -116,7 +116,7 @@ async def test_raw_response_create(self, async_client: AsyncOpenAI, respx_mock:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))

         response = await async_client.audio.speech.with_raw_response.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
         )
@@ -131,7 +131,7 @@ async def test_raw_response_create(self, async_client: AsyncOpenAI, respx_mock:
     async def test_streaming_response_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None:
         respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"}))
         async with async_client.audio.speech.with_streaming_response.create(
-            input="string",
+            input="input",
             model="string",
             voice="alloy",
         ) as response:
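
The speech tests above are the only ones in this commit that stub the HTTP layer inline with respx, presumably because /audio/speech returns binary audio rather than JSON. A self-contained sketch of that mocking pattern outside pytest follows; the base URL, dummy key, and the printed attribute are assumptions on my part rather than details taken from the diff.

import httpx
import respx

from openai import OpenAI

# Route POST /audio/speech to a canned httpx.Response, mirroring the
# respx_mock fixture used in test_speech.py; no real network traffic occurs.
with respx.mock(base_url="https://api.openai.com/v1") as respx_mock:
    respx_mock.post("/audio/speech").mock(
        return_value=httpx.Response(200, json={"foo": "bar"})
    )

    client = OpenAI(api_key="sk-dummy")  # dummy key; the request never leaves respx
    speech = client.audio.speech.create(
        input="input",   # the named placeholder values introduced by this commit
        model="string",
        voice="alloy",
    )
    print(speech.content)  # the mocked body: b'{"foo": "bar"}'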

tests/api_resources/audio/test_transcriptions.py (+4, -4)

@@ -30,8 +30,8 @@ def test_method_create_with_all_params(self, client: OpenAI) -> None:
         transcription = client.audio.transcriptions.create(
             file=b"raw file contents",
             model="whisper-1",
-            language="string",
-            prompt="string",
+            language="language",
+            prompt="prompt",
             response_format="json",
             temperature=0,
             timestamp_granularities=["word", "segment"],
@@ -81,8 +81,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) ->
         transcription = await async_client.audio.transcriptions.create(
             file=b"raw file contents",
             model="whisper-1",
-            language="string",
-            prompt="string",
+            language="language",
+            prompt="prompt",
             response_format="json",
             temperature=0,
             timestamp_granularities=["word", "segment"],

tests/api_resources/audio/test_translations.py (+4, -4)

@@ -30,8 +30,8 @@ def test_method_create_with_all_params(self, client: OpenAI) -> None:
         translation = client.audio.translations.create(
             file=b"raw file contents",
             model="whisper-1",
-            prompt="string",
-            response_format="string",
+            prompt="prompt",
+            response_format="response_format",
             temperature=0,
         )
         assert_matches_type(Translation, translation, path=["response"])
@@ -79,8 +79,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) ->
         translation = await async_client.audio.translations.create(
             file=b"raw file contents",
             model="whisper-1",
-            prompt="string",
-            response_format="string",
+            prompt="prompt",
+            response_format="response_format",
             temperature=0,
         )
         assert_matches_type(Translation, translation, path=["response"])

tests/api_resources/beta/test_assistants.py (+40, -40)

@@ -32,10 +32,10 @@ def test_method_create(self, client: OpenAI) -> None:
     def test_method_create_with_all_params(self, client: OpenAI) -> None:
         assistant = client.beta.assistants.create(
             model="gpt-4-turbo",
-            description="string",
-            instructions="string",
+            description="description",
+            instructions="instructions",
             metadata={},
-            name="string",
+            name="name",
             response_format="none",
             temperature=1,
             tool_resources={
@@ -83,14 +83,14 @@ def test_streaming_response_create(self, client: OpenAI) -> None:
     @parametrize
     def test_method_retrieve(self, client: OpenAI) -> None:
         assistant = client.beta.assistants.retrieve(
-            "string",
+            "assistant_id",
         )
         assert_matches_type(Assistant, assistant, path=["response"])

     @parametrize
     def test_raw_response_retrieve(self, client: OpenAI) -> None:
         response = client.beta.assistants.with_raw_response.retrieve(
-            "string",
+            "assistant_id",
         )

         assert response.is_closed is True
@@ -101,7 +101,7 @@ def test_raw_response_retrieve(self, client: OpenAI) -> None:
     @parametrize
     def test_streaming_response_retrieve(self, client: OpenAI) -> None:
         with client.beta.assistants.with_streaming_response.retrieve(
-            "string",
+            "assistant_id",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -121,19 +121,19 @@ def test_path_params_retrieve(self, client: OpenAI) -> None:
     @parametrize
     def test_method_update(self, client: OpenAI) -> None:
         assistant = client.beta.assistants.update(
-            "string",
+            assistant_id="assistant_id",
         )
         assert_matches_type(Assistant, assistant, path=["response"])

     @parametrize
     def test_method_update_with_all_params(self, client: OpenAI) -> None:
         assistant = client.beta.assistants.update(
-            "string",
-            description="string",
-            instructions="string",
+            assistant_id="assistant_id",
+            description="description",
+            instructions="instructions",
             metadata={},
-            model="string",
-            name="string",
+            model="model",
+            name="name",
             response_format="none",
             temperature=1,
             tool_resources={
@@ -148,7 +148,7 @@ def test_method_update_with_all_params(self, client: OpenAI) -> None:
     @parametrize
     def test_raw_response_update(self, client: OpenAI) -> None:
         response = client.beta.assistants.with_raw_response.update(
-            "string",
+            assistant_id="assistant_id",
         )

         assert response.is_closed is True
@@ -159,7 +159,7 @@ def test_raw_response_update(self, client: OpenAI) -> None:
     @parametrize
     def test_streaming_response_update(self, client: OpenAI) -> None:
         with client.beta.assistants.with_streaming_response.update(
-            "string",
+            assistant_id="assistant_id",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -173,7 +173,7 @@ def test_streaming_response_update(self, client: OpenAI) -> None:
     def test_path_params_update(self, client: OpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
             client.beta.assistants.with_raw_response.update(
-                "",
+                assistant_id="",
             )

     @parametrize
@@ -184,8 +184,8 @@ def test_method_list(self, client: OpenAI) -> None:
     @parametrize
     def test_method_list_with_all_params(self, client: OpenAI) -> None:
         assistant = client.beta.assistants.list(
-            after="string",
-            before="string",
+            after="after",
+            before="before",
             limit=0,
             order="asc",
         )
@@ -214,14 +214,14 @@ def test_streaming_response_list(self, client: OpenAI) -> None:
     @parametrize
     def test_method_delete(self, client: OpenAI) -> None:
         assistant = client.beta.assistants.delete(
-            "string",
+            "assistant_id",
         )
         assert_matches_type(AssistantDeleted, assistant, path=["response"])

     @parametrize
     def test_raw_response_delete(self, client: OpenAI) -> None:
         response = client.beta.assistants.with_raw_response.delete(
-            "string",
+            "assistant_id",
         )

         assert response.is_closed is True
@@ -232,7 +232,7 @@ def test_raw_response_delete(self, client: OpenAI) -> None:
     @parametrize
     def test_streaming_response_delete(self, client: OpenAI) -> None:
         with client.beta.assistants.with_streaming_response.delete(
-            "string",
+            "assistant_id",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -264,10 +264,10 @@ async def test_method_create(self, async_client: AsyncOpenAI) -> None:
     async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
         assistant = await async_client.beta.assistants.create(
             model="gpt-4-turbo",
-            description="string",
-            instructions="string",
+            description="description",
+            instructions="instructions",
             metadata={},
-            name="string",
+            name="name",
             response_format="none",
             temperature=1,
             tool_resources={
@@ -315,14 +315,14 @@ async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> Non
     @parametrize
     async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
         assistant = await async_client.beta.assistants.retrieve(
-            "string",
+            "assistant_id",
         )
         assert_matches_type(Assistant, assistant, path=["response"])

     @parametrize
     async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
         response = await async_client.beta.assistants.with_raw_response.retrieve(
-            "string",
+            "assistant_id",
         )

         assert response.is_closed is True
@@ -333,7 +333,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
     @parametrize
     async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
         async with async_client.beta.assistants.with_streaming_response.retrieve(
-            "string",
+            "assistant_id",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -353,19 +353,19 @@ async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
     @parametrize
     async def test_method_update(self, async_client: AsyncOpenAI) -> None:
         assistant = await async_client.beta.assistants.update(
-            "string",
+            assistant_id="assistant_id",
         )
         assert_matches_type(Assistant, assistant, path=["response"])

     @parametrize
     async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None:
         assistant = await async_client.beta.assistants.update(
-            "string",
-            description="string",
-            instructions="string",
+            assistant_id="assistant_id",
+            description="description",
+            instructions="instructions",
             metadata={},
-            model="string",
-            name="string",
+            model="model",
+            name="name",
             response_format="none",
             temperature=1,
             tool_resources={
@@ -380,7 +380,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) ->
     @parametrize
     async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None:
         response = await async_client.beta.assistants.with_raw_response.update(
-            "string",
+            assistant_id="assistant_id",
         )

         assert response.is_closed is True
@@ -391,7 +391,7 @@ async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None:
     @parametrize
     async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None:
         async with async_client.beta.assistants.with_streaming_response.update(
-            "string",
+            assistant_id="assistant_id",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -405,7 +405,7 @@ async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> Non
     async def test_path_params_update(self, async_client: AsyncOpenAI) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"):
             await async_client.beta.assistants.with_raw_response.update(
-                "",
+                assistant_id="",
             )

     @parametrize
@@ -416,8 +416,8 @@ async def test_method_list(self, async_client: AsyncOpenAI) -> None:
     @parametrize
     async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None:
         assistant = await async_client.beta.assistants.list(
-            after="string",
-            before="string",
+            after="after",
+            before="before",
             limit=0,
             order="asc",
         )
@@ -446,14 +446,14 @@ async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
     @parametrize
     async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
         assistant = await async_client.beta.assistants.delete(
-            "string",
+            "assistant_id",
         )
         assert_matches_type(AssistantDeleted, assistant, path=["response"])

     @parametrize
     async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
         response = await async_client.beta.assistants.with_raw_response.delete(
-            "string",
+            "assistant_id",
         )

         assert response.is_closed is True
@@ -464,7 +464,7 @@ async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
     @parametrize
     async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
         async with async_client.beta.assistants.with_streaming_response.delete(
-            "string",
+            "assistant_id",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
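
A side effect visible in the test_path_params_* cases above: with the keyword form, an empty assistant_id is rejected client-side before any request is built. A short standalone illustration, assuming a dummy key (nothing is sent over the network because the ValueError is raised first):

import pytest

from openai import OpenAI

client = OpenAI(api_key="sk-dummy")

# The SDK validates path parameters locally, so this never reaches the API.
with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id`"):
    client.beta.assistants.with_raw_response.update(assistant_id="")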
