@@ -89,13 +89,57 @@ a2a = [
8989 " fastapi>=0.115.12" ,
9090 " starlette>=0.46.2" ,
9191]
all = [
    # anthropic
    "anthropic>=0.21.0,<1.0.0",

    # dev
    "commitizen>=4.4.0,<5.0.0",
    "hatch>=1.0.0,<2.0.0",
    "moto>=5.1.0,<6.0.0",
    "mypy>=1.15.0,<2.0.0",
    "pre-commit>=3.2.0,<4.2.0",
    "pytest>=8.0.0,<9.0.0",
    "pytest-asyncio>=0.26.0,<0.27.0",
    "ruff>=0.4.4,<0.5.0",

    # docs
    "sphinx>=5.0.0,<6.0.0",
    "sphinx-rtd-theme>=1.0.0,<2.0.0",
    "sphinx-autodoc-typehints>=1.12.0,<2.0.0",

    # litellm
    "litellm>=1.72.6,<1.73.0",

    # llama
    "llama-api-client>=0.1.0,<1.0.0",

    # mistral
    "mistralai>=1.8.2",

    # ollama
    "ollama>=0.4.8,<1.0.0",

    # openai
    "openai>=1.68.0,<2.0.0",

    # otel
    "opentelemetry-exporter-otlp-proto-http>=1.30.0,<2.0.0",

    # a2a
    "a2a-sdk>=0.2.6",
    "uvicorn>=0.34.2",
    "httpx>=0.28.1",
    "fastapi>=0.115.12",
    "starlette>=0.46.2",
]

[tool.hatch.version]
# Tells Hatch to use your version control system (git) to determine the version.
source = "vcs"

[tool.hatch.envs.hatch-static-analysis]
# NOTE(review): the diff rendered this feature as " a2a " (trailing space) unlike
# every sibling; normalized to "a2a" to match the extras defined above — confirm.
features = ["anthropic", "litellm", "llamaapi", "ollama", "openai", "otel", "mistral", "a2a"]
99143dependencies = [
100144 " mypy>=1.15.0,<2.0.0" ,
101145 " ruff>=0.11.6,<0.12.0" ,
@@ -111,15 +155,14 @@ format-fix = [
111155]
lint-check = [
    "ruff check",
    "mypy -p src"
]
lint-fix = [
    "ruff check --fix"
]

[tool.hatch.envs.hatch-test]
# NOTE(review): the diff rendered this feature as " a2a " (trailing space) unlike
# every sibling; normalized to "a2a" to match the extras defined above — confirm.
features = ["anthropic", "litellm", "llamaapi", "ollama", "openai", "otel", "mistral", "a2a"]
123166extra-dependencies = [
124167 " moto>=5.1.0,<6.0.0" ,
125168 " pytest>=8.0.0,<9.0.0" ,
@@ -135,35 +178,17 @@ extra-args = [
135178
[tool.hatch.envs.dev]
dev-mode = true
# The dedicated [tool.hatch.envs.a2a] environment was removed by this change;
# a2a now runs in the shared envs (the A2A/OTEL dependency conflict was resolved).
features = ["dev", "docs", "anthropic", "litellm", "llamaapi", "ollama", "otel", "mistral"]

[[tool.hatch.envs.hatch-test.matrix]]
python = ["3.13", "3.12", "3.11", "3.10"]

[tool.hatch.envs.hatch-test.scripts]
# tests/multiagent/a2a is no longer excluded — the A2A/OTEL http exporter
# dependency conflict that forced the --ignore was resolved by this change.
run = [
    "pytest{env:HATCH_TEST_ARGS:} {args}"
]
run-cov = [
    "pytest{env:HATCH_TEST_ARGS:} --cov --cov-config=pyproject.toml {args}"
]

cov-combine = []
@@ -198,10 +223,6 @@ prepare = [
198223 " hatch run test-lint" ,
199224 " hatch test --all"
200225]
201- test-a2a = [
202- # required to run manually due to A2A and OTEL http exporter dependency conflict
203- " hatch -e a2a run run {args}"
204- ]
205226
[tool.mypy]
python_version = "3.10"
0 commit comments