src/strands/models/ollama.py (13 changes: 7 additions & 6 deletions)
@@ -68,14 +68,12 @@ def __init__(
             ollama_client_args: Additional arguments for the Ollama client.
             **model_config: Configuration options for the Ollama model.
         """
+        self.host = host
+        self.client_args = ollama_client_args or {}
         self.config = OllamaModel.OllamaConfig(**model_config)
 
         logger.debug("config=<%s> | initializing", self.config)
 
-        ollama_client_args = ollama_client_args if ollama_client_args is not None else {}
-
-        self.client = ollama.AsyncClient(host, **ollama_client_args)
-
     @override
     def update_config(self, **model_config: Unpack[OllamaConfig]) -> None:  # type: ignore
         """Update the Ollama Model configuration with the provided arguments.
@@ -306,7 +304,8 @@ async def stream(
         logger.debug("invoking model")
         tool_requested = False
 
-        response = await self.client.chat(**request)
+        client = ollama.AsyncClient(self.host, **self.client_args)
+        response = await client.chat(**request)
 
         logger.debug("got response from model")
         yield self.format_chunk({"chunk_type": "message_start"})
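With the client now created inside `stream`, every streaming invocation gets its own `AsyncClient`. A self-contained sketch of the equivalent streaming call against a local Ollama server; the host, model name, and prompt are placeholders, not taken from the PR:

```python
import asyncio

import ollama


async def stream_demo() -> None:
    # Fresh client per request, mirroring the two added lines above.
    client = ollama.AsyncClient("http://localhost:11434")
    stream = await client.chat(
        model="llama3",  # placeholder model name
        messages=[{"role": "user", "content": "Say hello."}],
        stream=True,  # chat() returns an async iterator of chunks
    )
    async for chunk in stream:
        print(chunk.message.content, end="", flush=True)


asyncio.run(stream_demo())
```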
@@ -346,7 +345,9 @@ async def structured_output(
         formatted_request = self.format_request(messages=prompt)
         formatted_request["format"] = output_model.model_json_schema()
         formatted_request["stream"] = False
-        response = await self.client.chat(**formatted_request)
+
+        client = ollama.AsyncClient(self.host, **self.client_args)
+        response = await client.chat(**formatted_request)
 
         try:
             content = response.message.content.strip()
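`structured_output` takes the same per-request client route: it constrains generation by sending a JSON schema in the request's `format` field with streaming disabled, then parses `response.message.content`. A hedged end-to-end sketch of that flow using a Pydantic model; `Weather`, the host, and the model name are illustrative assumptions rather than anything in this PR:

```python
import asyncio

import ollama
from pydantic import BaseModel


class Weather(BaseModel):  # hypothetical output schema
    city: str
    temperature_c: float


async def structured_demo() -> None:
    client = ollama.AsyncClient("http://localhost:11434")
    response = await client.chat(
        model="llama3",  # placeholder model name
        messages=[{"role": "user", "content": "Weather in Paris, as JSON."}],
        format=Weather.model_json_schema(),  # same constraint the PR applies
        stream=False,
    )
    # Validate the raw JSON text against the schema, as structured_output does.
    print(Weather.model_validate_json(response.message.content))


asyncio.run(structured_demo())
```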