Commit 59a02f0

feat: enable_persist: sync updates from stainless branch: yanxi0830/dev (#145)
# What does this PR do?

- adapt to llamastack/llama-stack#1012

## Test Plan

[Describe the tests you ran to verify your changes with result summaries. *Provide clear instructions so the plan can be easily re-executed.*]
1 parent d6e855e commit 59a02f0
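
For context, the diffs below make `enable_session_persistence` an optional field of `agent_config` instead of a required one. A minimal client-side sketch of what that allows, assuming a locally running Llama Stack server; the base URL and model id are placeholders, not values from this PR:

```python
# Hypothetical usage sketch: with this change, enable_session_persistence may be
# omitted from agent_config entirely; pass it only when persistence is wanted.
# The base URL and model id below are placeholders, not values from this PR.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder endpoint

# Minimal config: the flag is no longer required.
agent = client.agents.create(
    agent_config={
        "instructions": "You are a helpful assistant.",
        "model": "meta-llama/Llama-3.1-8B-Instruct",
    },
)

# Opt in explicitly when agent sessions should be persisted.
persistent_agent = client.agents.create(
    agent_config={
        "instructions": "You are a helpful assistant.",
        "model": "meta-llama/Llama-3.1-8B-Instruct",
        "enable_session_persistence": True,
    },
)
```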

File tree

4 files changed: +23 lines, -14 lines

src/llama_stack_client/_utils/_sync.py
src/llama_stack_client/types/shared/agent_config.py
src/llama_stack_client/types/shared_params/agent_config.py
tests/api_resources/test_agents.py

src/llama_stack_client/_utils/_sync.py

Lines changed: 17 additions & 2 deletions
@@ -7,16 +7,20 @@
 from typing import Any, TypeVar, Callable, Awaitable
 from typing_extensions import ParamSpec
 
+import anyio
+import sniffio
+import anyio.to_thread
+
 T_Retval = TypeVar("T_Retval")
 T_ParamSpec = ParamSpec("T_ParamSpec")
 
 
 if sys.version_info >= (3, 9):
-    to_thread = asyncio.to_thread
+    _asyncio_to_thread = asyncio.to_thread
 else:
     # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
     # for Python 3.8 support
-    async def to_thread(
+    async def _asyncio_to_thread(
         func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
     ) -> Any:
         """Asynchronously run function *func* in a separate thread.
@@ -34,6 +38,17 @@ async def to_thread(
         return await loop.run_in_executor(None, func_call)
 
 
+async def to_thread(
+    func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
+) -> T_Retval:
+    if sniffio.current_async_library() == "asyncio":
+        return await _asyncio_to_thread(func, *args, **kwargs)
+
+    return await anyio.to_thread.run_sync(
+        functools.partial(func, *args, **kwargs),
+    )
+
+
 # inspired by `asyncer`, https://github.com/tiangolo/asyncer
 def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
     """

src/llama_stack_client/types/shared/agent_config.py

Lines changed: 2 additions & 2 deletions
@@ -49,14 +49,14 @@ class ToolgroupUnionMember1(BaseModel):
 
 
 class AgentConfig(BaseModel):
-    enable_session_persistence: bool
-
     instructions: str
 
     model: str
 
     client_tools: Optional[List[ToolDef]] = None
 
+    enable_session_persistence: Optional[bool] = None
+
     input_shields: Optional[List[str]] = None
 
     max_infer_iters: Optional[int] = None
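
A small sketch of what this relaxation means for the response model; the import path mirrors the file above, and the field values are placeholders:

```python
# Sketch: enable_session_persistence is no longer required on the response
# model and defaults to None when the server omits it.
from llama_stack_client.types.shared.agent_config import AgentConfig

minimal = AgentConfig(instructions="instructions", model="model")
assert minimal.enable_session_persistence is None

explicit = AgentConfig(instructions="instructions", model="model", enable_session_persistence=True)
assert explicit.enable_session_persistence is True
```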

src/llama_stack_client/types/shared_params/agent_config.py

Lines changed: 2 additions & 2 deletions
@@ -50,14 +50,14 @@ class ToolgroupUnionMember1(TypedDict, total=False):
 
 
 class AgentConfig(TypedDict, total=False):
-    enable_session_persistence: Required[bool]
-
     instructions: Required[str]
 
     model: Required[str]
 
     client_tools: Iterable[ToolDefParam]
 
+    enable_session_persistence: bool
+
     input_shields: List[str]
 
     max_infer_iters: int
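
On the request side, a sketch of the relaxed typing, assuming the import path mirrors the file above: a request `agent_config` dict without `enable_session_persistence` now satisfies the `AgentConfig` TypedDict, since only `instructions` and `model` remain `Required`:

```python
# Sketch (assumed import path): enable_session_persistence is now an ordinary
# optional key on the request-params TypedDict, so it can simply be omitted.
from llama_stack_client.types.shared_params.agent_config import AgentConfig

minimal: AgentConfig = {"instructions": "instructions", "model": "model"}

persistent: AgentConfig = {
    "instructions": "instructions",
    "model": "model",
    "enable_session_persistence": True,
}
```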

tests/api_resources/test_agents.py

Lines changed: 2 additions & 8 deletions
@@ -21,7 +21,6 @@ class TestAgents:
     def test_method_create(self, client: LlamaStackClient) -> None:
         agent = client.agents.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
             },
@@ -32,7 +31,6 @@ def test_method_create(self, client: LlamaStackClient) -> None:
     def test_method_create_with_all_params(self, client: LlamaStackClient) -> None:
         agent = client.agents.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
                 "client_tools": [
@@ -51,6 +49,7 @@ def test_method_create_with_all_params(self, client: LlamaStackClient) -> None:
                         ],
                     }
                 ],
+                "enable_session_persistence": True,
                 "input_shields": ["string"],
                 "max_infer_iters": 0,
                 "output_shields": ["string"],
@@ -79,7 +78,6 @@ def test_method_create_with_all_params(self, client: LlamaStackClient) -> None:
     def test_raw_response_create(self, client: LlamaStackClient) -> None:
         response = client.agents.with_raw_response.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
             },
@@ -94,7 +92,6 @@ def test_raw_response_create(self, client: LlamaStackClient) -> None:
     def test_streaming_response_create(self, client: LlamaStackClient) -> None:
         with client.agents.with_streaming_response.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
             },
@@ -153,7 +150,6 @@ class TestAsyncAgents:
     async def test_method_create(self, async_client: AsyncLlamaStackClient) -> None:
         agent = await async_client.agents.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
             },
@@ -164,7 +160,6 @@ async def test_method_create(self, async_client: AsyncLlamaStackClient) -> None:
     async def test_method_create_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
         agent = await async_client.agents.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
                 "client_tools": [
@@ -183,6 +178,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
                         ],
                     }
                 ],
+                "enable_session_persistence": True,
                 "input_shields": ["string"],
                 "max_infer_iters": 0,
                 "output_shields": ["string"],
@@ -211,7 +207,6 @@ async def test_method_create_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
     async def test_raw_response_create(self, async_client: AsyncLlamaStackClient) -> None:
         response = await async_client.agents.with_raw_response.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
             },
@@ -226,7 +221,6 @@ async def test_raw_response_create(self, async_client: AsyncLlamaStackClient) -> None:
     async def test_streaming_response_create(self, async_client: AsyncLlamaStackClient) -> None:
         async with async_client.agents.with_streaming_response.create(
             agent_config={
-                "enable_session_persistence": True,
                 "instructions": "instructions",
                 "model": "model",
             },
