Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .github/actions/setup-runtimes-caching/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -118,3 +118,8 @@ runs:
dapr init --runtime-version=${{ env.DAPR_VERSION }}
dapr --version

- uses: ai-action/setup-ollama@v1
name: Setup Ollama
if: ${{ inputs.name == 'Full' || contains(inputs.name, 'Hosting.Ollama') }}
with:
version: 0.11.8
Comment on lines +121 to +125
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it's best that we set it up on all Ollama test runs, as we should run CI on both the container and non-container versions of Ollama

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I agree. To avoid any extra workflow/actions setup on non-Ollama tests, I felt this was as specific as I could get. The AppHostTests that cover both the container and non-container versions live in a single test project (CommunityToolkit.Aspire.Hosting.Ollama.Tests).

Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
var builder = DistributedApplication.CreateBuilder(args);

var ollama = builder.AddOllama("ollama")
.WithDataVolume()
var ollama = builder.AddOllamaLocal("ollama")
.WithOpenWebUI();

var phi3 = ollama.AddModel("phi3", "phi3");
Expand Down
41 changes: 41 additions & 0 deletions src/CommunityToolkit.Aspire.Hosting.Ollama/IOllamaResource.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
namespace Aspire.Hosting.ApplicationModel;

/// <summary>
/// Represents an Ollama resource (either a container-hosted or an executable-hosted Ollama server).
/// </summary>
/// <remarks>
/// Implementations expose both a connection string and endpoint information so that
/// dependent resources (e.g. <c>OllamaModelResource</c>, Open WebUI) can reference the server.
/// </remarks>
public interface IOllamaResource : IResourceWithConnectionString, IResourceWithEndpoints
{
    /// <summary>
    /// Gets the list of models to download on initial startup.
    /// </summary>
    /// <remarks>
    /// Populated via <see cref="AddModel"/>; duplicates are not added.
    /// </remarks>
    IReadOnlyList<string> Models { get; }

    /// <summary>
    /// Gets the primary HTTP endpoint for the Ollama server.
    /// </summary>
    EndpointReference PrimaryEndpoint { get; }

    /// <summary>
    /// Gets the host endpoint reference expression for this resource.
    /// </summary>
    EndpointReferenceExpression Host { get; }

    /// <summary>
    /// Gets the port endpoint reference expression for this resource.
    /// </summary>
    EndpointReferenceExpression Port { get; }

    /// <summary>
    /// Gets the connection URI expression for the Ollama server.
    /// </summary>
    /// <remarks>
    /// Format: <c>http://{host}:{port}</c>.
    /// </remarks>
    ReferenceExpression UriExpression { get; }

    /// <summary>
    /// Adds a model to the list of models to download on initial startup.
    /// </summary>
    /// <param name="modelName">The name of the model. Must be non-null and non-empty.</param>
    void AddModel(string modelName);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
namespace Aspire.Hosting.ApplicationModel;

/// <summary>
/// A resource that represents an Ollama server launched as a local executable
/// (the <c>ollama</c> command) rather than a container.
/// </summary>
/// <remarks>
/// Constructs an <see cref="OllamaExecutableResource"/>.
/// </remarks>
/// <param name="name">The name of the resource.</param>
public class OllamaExecutableResource(string name) : ExecutableResource(name, "ollama", string.Empty), IOllamaResource
{
    internal const string OllamaEndpointName = "http";
    internal const int DefaultHttpPort = 11434;

    // Models queued for download when the server first starts; duplicates are ignored.
    private readonly List<string> _modelNames = [];

    // Lazily created so the endpoint reference is only built on first access.
    private EndpointReference? _httpEndpoint;

    /// <inheritdoc/>
    public IReadOnlyList<string> Models => _modelNames;

    /// <inheritdoc/>
    public EndpointReference PrimaryEndpoint => _httpEndpoint ??= new(this, OllamaEndpointName);

    /// <inheritdoc/>
    public EndpointReferenceExpression Host => PrimaryEndpoint.Property(EndpointProperty.Host);

    /// <inheritdoc/>
    public EndpointReferenceExpression Port => PrimaryEndpoint.Property(EndpointProperty.Port);

    /// <summary>
    /// Gets the connection string expression for the Ollama server.
    /// </summary>
    /// <remarks>
    /// Format: <c>Endpoint={scheme}://{host}:{port}</c>.
    /// </remarks>
    public ReferenceExpression ConnectionStringExpression
    {
        get
        {
            var scheme = PrimaryEndpoint.Property(EndpointProperty.Scheme);
            return ReferenceExpression.Create($"Endpoint={scheme}://{Host}:{Port}");
        }
    }

    /// <inheritdoc/>
    public ReferenceExpression UriExpression
    {
        get
        {
            var scheme = PrimaryEndpoint.Property(EndpointProperty.Scheme);
            return ReferenceExpression.Create($"{scheme}://{Host}:{Port}");
        }
    }

    /// <inheritdoc/>
    public void AddModel(string modelName)
    {
        ArgumentException.ThrowIfNullOrEmpty(modelName, nameof(modelName));

        // Guard clause: silently ignore models that were already registered.
        if (_modelNames.Contains(modelName))
        {
            return;
        }

        _modelNames.Add(modelName);
    }

    // Explicit interface implementation: exposes the individual connection pieces
    // (Host, Port, Uri) for consumers that bind properties rather than the full string.
    IEnumerable<KeyValuePair<string, ReferenceExpression>> IResourceWithConnectionString.GetConnectionProperties()
    {
        yield return new("Host", ReferenceExpression.Create($"{Host}"));
        yield return new("Port", ReferenceExpression.Create($"{Port}"));
        yield return new("Uri", UriExpression);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,12 @@ namespace Aspire.Hosting.ApplicationModel;
/// <param name="name">The name for the resource.</param>
/// <param name="modelName">The name of the LLM model, can include a tag.</param>
/// <param name="parent">The <see cref="OllamaResource"/> parent.</param>
public class OllamaModelResource(string name, string modelName, OllamaResource parent) : Resource(name), IResourceWithParent<OllamaResource>, IResourceWithConnectionString
public class OllamaModelResource(string name, string modelName, IOllamaResource parent) : Resource(name), IResourceWithParent<IOllamaResource>, IResourceWithConnectionString
{
/// <summary>
/// Gets the parent Ollama container resource.
/// Gets the parent Ollama resource.
/// </summary>
public OllamaResource Parent { get; } = ThrowIfNull(parent);
public IOllamaResource Parent { get; } = ThrowIfNull(parent);

/// <summary>
/// Gets the connection string expression for the Ollama model.
Expand Down
34 changes: 9 additions & 25 deletions src/CommunityToolkit.Aspire.Hosting.Ollama/OllamaResource.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,32 +7,24 @@
/// Constructs an <see cref="OllamaResource"/>.
/// </remarks>
/// <param name="name">The name for the resource.</param>
public class OllamaResource(string name) : ContainerResource(name), IResourceWithConnectionString
public class OllamaResource(string name) : ContainerResource(name), IOllamaResource
{
internal const string OllamaEndpointName = "http";

private readonly List<string> _models = [];

private EndpointReference? _primaryEndpointReference;

/// <summary>
/// Adds a model to the list of models to download on initial startup.
/// </summary>
/// <inheritdoc/>
public IReadOnlyList<string> Models => _models;

/// <summary>
/// Gets the endpoint for the Ollama server.
/// </summary>
/// <inheritdoc/>
public EndpointReference PrimaryEndpoint => _primaryEndpointReference ??= new(this, OllamaEndpointName);

/// <summary>
/// Gets the host endpoint reference for this resource.
/// </summary>

/// <inheritdoc/>
public EndpointReferenceExpression Host => PrimaryEndpoint.Property(EndpointProperty.Host);

/// <summary>
/// Gets the port endpoint reference for this resource.
/// </summary>
/// <inheritdoc/>
public EndpointReferenceExpression Port => PrimaryEndpoint.Property(EndpointProperty.Port);

/// <summary>
Expand All @@ -42,19 +34,11 @@ public class OllamaResource(string name) : ContainerResource(name), IResourceWit
ReferenceExpression.Create(
$"Endpoint={PrimaryEndpoint.Property(EndpointProperty.Scheme)}://{PrimaryEndpoint.Property(EndpointProperty.Host)}:{PrimaryEndpoint.Property(EndpointProperty.Port)}"
);

/// <summary>
/// Gets the connection URI expression for the Ollama server.
/// </summary>
/// <remarks>
/// Format: <c>http://{host}:{port}</c>.
/// </remarks>

/// <inheritdoc/>
public ReferenceExpression UriExpression => ReferenceExpression.Create($"{PrimaryEndpoint.Property(EndpointProperty.Scheme)}://{Host}:{Port}");

/// <summary>
/// Adds a model to the list of models to download on initial startup.
/// </summary>
/// <param name="modelName">The name of the model</param>
/// <inheritdoc/>
public void AddModel(string modelName)
{
ArgumentException.ThrowIfNullOrEmpty(modelName, nameof(modelName));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,12 @@ namespace Aspire.Hosting;
public static partial class OllamaResourceBuilderExtensions
{
/// <summary>
/// Adds a model to the Ollama container.
/// Adds a model to the Ollama resource.
/// </summary>
/// <param name="builder">The <see cref="IDistributedApplicationBuilder"/>.</param>
/// <param name="modelName">The name of the LLM to download on initial startup.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
public static IResourceBuilder<OllamaModelResource> AddModel(this IResourceBuilder<OllamaResource> builder, string modelName)
public static IResourceBuilder<OllamaModelResource> AddModel(this IResourceBuilder<IOllamaResource> builder, string modelName)
{
ArgumentNullException.ThrowIfNull(builder, nameof(builder));
ArgumentException.ThrowIfNullOrWhiteSpace(modelName, nameof(modelName));
Expand All @@ -28,13 +28,13 @@ public static IResourceBuilder<OllamaModelResource> AddModel(this IResourceBuild
}

/// <summary>
/// Adds a model to the Ollama container.
/// Adds a model to the Ollama resource.
/// </summary>
/// <param name="builder">The <see cref="IDistributedApplicationBuilder"/>.</param>
/// <param name="name">The name of the resource.</param>
/// <param name="modelName">The name of the LLM to download on initial startup.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
public static IResourceBuilder<OllamaModelResource> AddModel(this IResourceBuilder<OllamaResource> builder, [ResourceName] string name, string modelName)
public static IResourceBuilder<OllamaModelResource> AddModel(this IResourceBuilder<IOllamaResource> builder, [ResourceName] string name, string modelName)
{
ArgumentNullException.ThrowIfNull(builder, nameof(builder));
ArgumentException.ThrowIfNullOrWhiteSpace(modelName, nameof(modelName));
Expand All @@ -55,13 +55,13 @@ public static IResourceBuilder<OllamaModelResource> AddModel(this IResourceBuild
}

/// <summary>
/// Adds a model from Hugging Face to the Ollama container. Only models in GGUF format are supported.
/// Adds a model from Hugging Face to the Ollama resource. Only models in GGUF format are supported.
/// </summary>
/// <param name="builder">The <see cref="IDistributedApplicationBuilder"/>.</param>
/// <param name="name">The name of the resource.</param>
/// <param name="modelName">The name of the LLM from Hugging Face in GGUF format to download on initial startup.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
public static IResourceBuilder<OllamaModelResource> AddHuggingFaceModel(this IResourceBuilder<OllamaResource> builder, [ResourceName] string name, string modelName)
public static IResourceBuilder<OllamaModelResource> AddHuggingFaceModel(this IResourceBuilder<IOllamaResource> builder, [ResourceName] string name, string modelName)
{
ArgumentNullException.ThrowIfNull(builder, nameof(builder));
ArgumentException.ThrowIfNullOrWhiteSpace(modelName, nameof(modelName));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ namespace Aspire.Hosting;
public static partial class OllamaResourceBuilderExtensions
{
/// <summary>
/// Adds an administration web UI Ollama to the application model using Open WebUI. This version the package defaults to the main tag of the Open WebUI container image
/// Adds an Open WebUI container to the application model for administering Ollama. This version of the package defaults to the main tag of the Open WebUI container image.
/// </summary>
/// <example>
/// Use in application host with an Ollama resource
Expand All @@ -28,7 +28,8 @@ public static partial class OllamaResourceBuilderExtensions
/// <param name="containerName">The name of the container (Optional).</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
/// <remarks>See https://openwebui.com for more information about Open WebUI</remarks>
public static IResourceBuilder<T> WithOpenWebUI<T>(this IResourceBuilder<T> builder, Action<IResourceBuilder<OpenWebUIResource>>? configureContainer = null, string? containerName = null) where T : OllamaResource
public static IResourceBuilder<T> WithOpenWebUI<T>(this IResourceBuilder<T> builder, Action<IResourceBuilder<OpenWebUIResource>>? configureContainer = null, string? containerName = null)
where T : class, IOllamaResource
{
ArgumentNullException.ThrowIfNull(builder, nameof(builder));

Expand Down Expand Up @@ -95,6 +96,21 @@ private static void ConfigureOpenWebUIContainer(EnvironmentCallbackContext conte
context.EnvironmentVariables.Add("ENABLE_SIGNUP", "false");
context.EnvironmentVariables.Add("ENABLE_COMMUNITY_SHARING", "false"); // by default don't enable sharing
context.EnvironmentVariables.Add("WEBUI_AUTH", "false"); // https://docs.openwebui.com/#quick-start-with-docker--recommended
context.EnvironmentVariables.Add("OLLAMA_BASE_URLS", string.Join(";", resource.OllamaResources.Select(resource => $"http://{resource.Name}:{resource.PrimaryEndpoint.TargetPort}")));

ReferenceExpressionBuilder builder = new();

for (int i = 0; i < resource.OllamaResources.Count; i++)
{
var ollama = resource.OllamaResources[i];
builder.Append($"{ollama.PrimaryEndpoint}");

if (i != resource.OllamaResources.Count - 1)
builder.AppendLiteral(";");
}

if (!builder.IsEmpty)
{
context.EnvironmentVariables["OLLAMA_BASE_URLS"] = builder.Build();
}
}
}
Loading
Loading