
Commit 47ecb87

Merge branch 'main' of https://github.com/strands-agents/sdk-python into async-tools

2 parents: 341e1d1 + f78b03a

37 files changed: +1101 -91 lines

.github/workflows/integration-test.yml

Lines changed: 1 addition & 1 deletion
@@ -69,4 +69,4 @@ jobs:
           AWS_REGION_NAME: us-east-1 # Needed for LiteLLM
         id: tests
         run: |
-          hatch test tests-integ
+          hatch test tests_integ
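
The integration test suite moved from a hyphenated directory (tests-integ) to an underscored one (tests_integ); the same rename shows up in pyproject.toml below. A plausible motivation, not stated in the commit: hyphenated names are not importable as Python packages. A minimal sketch:

    # Hyphens are not valid in Python identifiers, so a tests-integ directory
    # cannot be imported as a package, while tests_integ can.
    print("tests-integ".isidentifier())  # False
    print("tests_integ".isidentifier())  # True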

pyproject.toml

Lines changed: 11 additions & 6 deletions
@@ -35,6 +35,7 @@ dependencies = [
     "watchdog>=6.0.0,<7.0.0",
     "opentelemetry-api>=1.30.0,<2.0.0",
     "opentelemetry-sdk>=1.30.0,<2.0.0",
+    "opentelemetry-instrumentation-threading>=0.51b0,<1.00b0",
 ]

 [project.urls]
@@ -82,8 +83,12 @@ openai = [
 otel = [
     "opentelemetry-exporter-otlp-proto-http>=1.30.0,<2.0.0",
 ]
+writer = [
+    "writer-sdk>=2.2.0,<3.0.0"
+]
+
 a2a = [
-    "a2a-sdk>=0.2.6",
+    "a2a-sdk[sql]>=0.2.11",
     "uvicorn>=0.34.2",
     "httpx>=0.28.1",
     "fastapi>=0.115.12",
@@ -95,7 +100,7 @@ a2a = [
 source = "vcs"

 [tool.hatch.envs.hatch-static-analysis]
-features = ["anthropic", "litellm", "llamaapi", "ollama", "openai", "otel","mistral"]
+features = ["anthropic", "litellm", "llamaapi", "ollama", "openai", "otel", "mistral", "writer"]
 dependencies = [
     "mypy>=1.15.0,<2.0.0",
     "ruff>=0.11.6,<0.12.0",
@@ -119,7 +124,7 @@ lint-fix = [
 ]

 [tool.hatch.envs.hatch-test]
-features = ["anthropic", "litellm", "llamaapi", "ollama", "openai", "otel","mistral"]
+features = ["anthropic", "litellm", "llamaapi", "ollama", "openai", "otel", "mistral", "writer"]
 extra-dependencies = [
     "moto>=5.1.0,<6.0.0",
     "pytest>=8.0.0,<9.0.0",
@@ -135,7 +140,7 @@ extra-args = [

 [tool.hatch.envs.dev]
 dev-mode = true
-features = ["dev", "docs", "anthropic", "litellm", "llamaapi", "ollama", "otel","mistral"]
+features = ["dev", "docs", "anthropic", "litellm", "llamaapi", "ollama", "otel", "mistral", "writer"]

 [tool.hatch.envs.a2a]
 dev-mode = true
@@ -190,7 +195,7 @@ test = [
     "hatch test --cover --cov-report html --cov-report xml {args}"
 ]
 test-integ = [
-    "hatch test tests-integ {args}"
+    "hatch test tests_integ {args}"
 ]
 prepare = [
     "hatch fmt --linter",
@@ -225,7 +230,7 @@ ignore_missing_imports = true

 [tool.ruff]
 line-length = 120
-include = ["examples/**/*.py", "src/**/*.py", "tests/**/*.py", "tests-integ/**/*.py"]
+include = ["examples/**/*.py", "src/**/*.py", "tests/**/*.py", "tests_integ/**/*.py"]

 [tool.ruff.lint]
 select = [
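
This diff introduces an optional writer extra backed by writer-sdk and adds it to each hatch feature list. A hedged sketch of guarding on the extra at import time; the writerai import name is an assumption about writer-sdk, not something this diff states:

    # Fail with a helpful message when the optional "writer" extra is missing.
    try:
        import writerai  # assumed import name of writer-sdk
    except ImportError as e:
        raise ImportError(
            "Writer support requires the optional extra: "
            "pip install 'strands-agents[writer]'"
        ) from e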

src/strands/agent/agent.py

Lines changed: 1 addition & 10 deletions
@@ -30,7 +30,7 @@
 from ..types.content import ContentBlock, Message, Messages
 from ..types.exceptions import ContextWindowOverflowException
 from ..types.models import Model
-from ..types.tools import ToolConfig, ToolResult, ToolUse
+from ..types.tools import ToolResult, ToolUse
 from ..types.traces import AttributeValue
 from .agent_result import AgentResult
 from .conversation_manager import (
@@ -324,15 +324,6 @@ def tool_names(self) -> list[str]:
         all_tools = self.tool_registry.get_all_tools_config()
         return list(all_tools.keys())

-    @property
-    def tool_config(self) -> ToolConfig:
-        """Get the tool configuration for this agent.
-
-        Returns:
-            The complete tool configuration.
-        """
-        return self.tool_registry.initialize_tool_config()
-
     def __call__(self, prompt: str, **kwargs: Any) -> AgentResult:
         """Process a natural language prompt through the agent's event loop.

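With the Agent.tool_config property removed, callers should read tool specs from the registry, as the event loop changes below do. A minimal migration sketch, assuming an already-constructed Agent:

    from strands import Agent

    def tool_specs_for(agent: Agent) -> list:
        # Replacement for the removed tool_config property: the flat spec
        # list now comes straight from the agent's tool registry.
        return agent.tool_registry.get_all_tool_specs()
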
src/strands/event_loop/event_loop.py

Lines changed: 9 additions & 10 deletions
@@ -11,7 +11,7 @@
 import logging
 import time
 import uuid
-from typing import TYPE_CHECKING, Any, AsyncGenerator
+from typing import TYPE_CHECKING, Any, AsyncGenerator, cast

 from ..experimental.hooks import AfterToolInvocationEvent, BeforeToolInvocationEvent
 from ..experimental.hooks.registry import get_registry
@@ -21,7 +21,7 @@
 from ..types.content import Message
 from ..types.exceptions import ContextWindowOverflowException, EventLoopException, ModelThrottledException
 from ..types.streaming import Metrics, StopReason
-from ..types.tools import ToolGenerator, ToolResult, ToolUse
+from ..types.tools import ToolChoice, ToolChoiceAuto, ToolConfig, ToolGenerator, ToolResult, ToolUse
 from .message_processor import clean_orphaned_empty_tool_uses
 from .streaming import stream_messages

@@ -112,10 +112,12 @@ async def event_loop_cycle(agent: "Agent", kwargs: dict[str, Any]) -> AsyncGenerator
         model_id=model_id,
     )

+    tool_specs = agent.tool_registry.get_all_tool_specs()
+
     try:
         # TODO: To maintain backwards compatibility, we need to combine the stream event with kwargs before yielding
         # to the callback handler. This will be revisited when migrating to strongly typed events.
-        async for event in stream_messages(agent.model, agent.system_prompt, agent.messages, agent.tool_config):
+        async for event in stream_messages(agent.model, agent.system_prompt, agent.messages, tool_specs):
             if "callback" in event:
                 yield {"callback": {**event["callback"], **(kwargs if "delta" in event["callback"] else {})}}

@@ -172,12 +174,6 @@ async def event_loop_cycle(agent: "Agent", kwargs: dict[str, Any]) -> AsyncGenerator

     # If the model is requesting to use tools
     if stop_reason == "tool_use":
-        if agent.tool_config is None:
-            raise EventLoopException(
-                Exception("Model requested tool use but no tool config provided"),
-                kwargs["request_state"],
-            )
-
         # Handle tool execution
         events = _handle_tool_execution(
             stop_reason,
@@ -282,7 +278,10 @@ async def run_tool(agent: "Agent", tool_use: ToolUse, kwargs: dict[str, Any]) ->
             "model": agent.model,
             "system_prompt": agent.system_prompt,
             "messages": agent.messages,
-            "tool_config": agent.tool_config,
+            "tool_config": ToolConfig(  # for backwards compatibility
+                tools=[{"toolSpec": tool_spec} for tool_spec in agent.tool_registry.get_all_tool_specs()],
+                toolChoice=cast(ToolChoice, {"auto": ToolChoiceAuto()}),
+            ),
         }
     )

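The hook payload in run_tool still exposes a tool_config key for backwards compatibility, rebuilt on the fly from the registry. A standalone sketch of that reconstruction, using the same types the diff imports:

    from typing import cast

    from strands.types.tools import ToolChoice, ToolChoiceAuto, ToolConfig, ToolSpec

    def legacy_tool_config(tool_specs: list[ToolSpec]) -> ToolConfig:
        # Mirror of the shim above: wrap each flat spec back into the
        # {"toolSpec": ...} shape and default the tool choice to "auto".
        return ToolConfig(
            tools=[{"toolSpec": spec} for spec in tool_specs],
            toolChoice=cast(ToolChoice, {"auto": ToolChoiceAuto()}),
        )
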
src/strands/event_loop/streaming.py

Lines changed: 4 additions & 5 deletions
@@ -19,7 +19,7 @@
     StreamEvent,
     Usage,
 )
-from ..types.tools import ToolConfig, ToolUse
+from ..types.tools import ToolSpec, ToolUse

 logger = logging.getLogger(__name__)

@@ -304,24 +304,23 @@ async def stream_messages(
     model: Model,
     system_prompt: Optional[str],
     messages: Messages,
-    tool_config: Optional[ToolConfig],
+    tool_specs: list[ToolSpec],
 ) -> AsyncGenerator[dict[str, Any], None]:
     """Streams messages to the model and processes the response.

     Args:
         model: Model provider.
         system_prompt: The system prompt to send.
         messages: List of messages to send.
-        tool_config: Configuration for the tools to use.
+        tool_specs: The list of tool specs.

     Returns:
         The reason for stopping, the final message, and the usage metrics
     """
     logger.debug("model=<%s> | streaming messages", model)

     messages = remove_blank_messages_content_text(messages)
-    tool_specs = [tool["toolSpec"] for tool in tool_config.get("tools", [])] or None if tool_config else None

-    chunks = model.converse(messages, tool_specs, system_prompt)
+    chunks = model.converse(messages, tool_specs if tool_specs else None, system_prompt)
     async for event in process_stream(chunks, messages):
         yield event
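
stream_messages now takes the flat spec list directly and normalizes an empty list to None before calling model.converse, preserving the old "no tools" behavior. A hedged calling sketch; the model, messages, and tool_specs arguments are placeholders:

    from strands.event_loop.streaming import stream_messages

    async def drive(model, messages, tool_specs):
        # An empty tool_specs list reaches the model as None, matching the
        # previous behavior when no tool_config was provided.
        async for event in stream_messages(model, None, messages, tool_specs):
            print(event)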
