`OnnxStack.StableDiffusion` is a library that provides higher-level Stable Diffusion services for use in .NET applications. It offers extensive support for features such as dependency injection, .NET configuration implementations, ASP.NET Core integration, and IHostedService support.

## Getting Started

`OnnxStack.StableDiffusion` is available via the NuGet package manager. Download and install it:
```
PM> Install-Package OnnxStack.StableDiffusion
```
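
If you prefer the .NET CLI, the same package can be added with:
```
dotnet add package OnnxStack.StableDiffusion
```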

### Microsoft.ML.OnnxRuntime
Depending on the devices you have and the platform you are running on, install the `Microsoft.ML.OnnxRuntime` package that best suits your needs.

### CPU and GPU via Microsoft DirectML
```
PM> Install-Package Microsoft.ML.OnnxRuntime.DirectML
```

### GPU via CUDA (NVIDIA)
```
PM> Install-Package Microsoft.ML.OnnxRuntime.Gpu
```


### .NET Core Registration

You can easily integrate `OnnxStack.StableDiffusion` into your application's services layer. This registration process sets up the necessary services and loads the `appsettings.json` configuration.

Example: Registering OnnxStack.StableDiffusion
```csharp
builder.Services.AddOnnxStackStableDiffusion();
```
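
Once registered, `IStableDiffusionService` can be resolved anywhere the container is available, for example through constructor injection. A minimal sketch of the same pattern the console example below uses (`ImageGenerator` is just a hypothetical consumer class):
```csharp
using OnnxStack.StableDiffusion.Common;

public class ImageGenerator
{
    private readonly IStableDiffusionService _stableDiffusionService;

    // Supplied by the container once AddOnnxStackStableDiffusion() has been called
    public ImageGenerator(IStableDiffusionService stableDiffusionService)
    {
        _stableDiffusionService = stableDiffusionService;
    }
}
```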


## .NET Console Application Example

Required NuGet packages for this example:
```nuget
Microsoft.Extensions.Hosting
Microsoft.Extensions.Logging
```

```csharp
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using OnnxStack.StableDiffusion.Common;
using OnnxStack.StableDiffusion.Config;

internal class Program
{
    static async Task Main(string[] _)
    {
        var builder = Host.CreateApplicationBuilder();
        builder.Logging.ClearProviders();
        builder.Services.AddLogging((loggingBuilder) => loggingBuilder.SetMinimumLevel(LogLevel.Error));

        // Add OnnxStack Stable Diffusion
        builder.Services.AddOnnxStackStableDiffusion();

        // Add AppService
        builder.Services.AddHostedService<AppService>();

        // Start
        await builder.Build().RunAsync();
    }
}

internal class AppService : IHostedService
{
    private readonly string _outputDirectory;
    private readonly IStableDiffusionService _stableDiffusionService;

    public AppService(IStableDiffusionService stableDiffusionService)
    {
        _stableDiffusionService = stableDiffusionService;
        _outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "Images");
    }

    public async Task StartAsync(CancellationToken cancellationToken)
    {
        Directory.CreateDirectory(_outputDirectory);

        while (!cancellationToken.IsCancellationRequested)
        {
            System.Console.WriteLine("Please type a prompt and press ENTER");
            var prompt = System.Console.ReadLine();

            System.Console.WriteLine("Please type a negative prompt and press ENTER (optional)");
            var negativePrompt = System.Console.ReadLine();


            // Example only, full config depends on model
            // appsettings.json is recommended for ease of use
            var modelOptions = new ModelOptions
            {
                Name = "Stable Diffusion 1.5",
                ExecutionProvider = ExecutionProvider.DirectML,
                ModelConfigurations = new List<OnnxModelSessionConfig>
                {
                    new OnnxModelSessionConfig
                    {
                        Type = OnnxModelType.Unet,
                        OnnxModelPath = "model path"
                    }
                }
            };

            var promptOptions = new PromptOptions
            {
                Prompt = prompt,
                NegativePrompt = negativePrompt,
                DiffuserType = DiffuserType.TextToImage,

                // Input for ImageToImage
                // InputImage = new InputImage(await File.ReadAllBytesAsync("image to image filename"))
            };

            var schedulerOptions = new SchedulerOptions
            {
                Seed = Random.Shared.Next(),
                GuidanceScale = 7.5f,
                InferenceSteps = 30,
                Height = 512,
                Width = 512,
                SchedulerType = SchedulerType.LMS,
            };


            // Generate Image Example
            var outputFilename = Path.Combine(_outputDirectory, $"{schedulerOptions.Seed}_{schedulerOptions.SchedulerType}.png");
            var result = await _stableDiffusionService.GenerateAsImageAsync(modelOptions, promptOptions, schedulerOptions);
            if (result is not null)
            {
                // Save image to disk
                await result.SaveAsPngAsync(outputFilename);
            }


            // Generate Batch Example
            var batchOptions = new BatchOptions
            {
                BatchType = BatchOptionType.Seed,
                ValueTo = 20
            };

            var batchIndex = 0;
            await foreach (var batchResult in _stableDiffusionService.GenerateBatchAsImageAsync(modelOptions, promptOptions, schedulerOptions, batchOptions))
            {
                // Save each image to disk under a unique filename so batch results are not overwritten
                var batchFilename = Path.Combine(_outputDirectory, $"Batch_{batchIndex++}_{schedulerOptions.SchedulerType}.png");
                await batchResult.SaveAsPngAsync(batchFilename);
            }
        }
    }

    public Task StopAsync(CancellationToken cancellationToken)
    {
        return Task.CompletedTask;
    }
}
```


## Configuration
The `appsettings.json` file is the easiest option for configuring model sets. Below is an example for `Stable Diffusion 1.5`.
The example adds the necessary paths to each model file required for Stable Diffusion, as well as any model-specific configurations.
Each model can be assigned to its own device, which is handy if you only have a small GPU; this way, you can offload only what you need. There are limitations depending on the version of the `Microsoft.ML.OnnxRuntime` package you are using, but in most cases you can split the load between CPU and GPU.

```json
{
  "Logging": {
    "LogLevel": {
      "Default": "Information",
      "Microsoft.AspNetCore": "Warning"
    }
  },

  "OnnxStackConfig": {
    "Name": "StableDiffusion 1.5",
    "IsEnabled": true,
    "PadTokenId": 49407,
    "BlankTokenId": 49407,
    "TokenizerLimit": 77,
    "EmbeddingsLength": 768,
    "ScaleFactor": 0.18215,
    "PipelineType": "StableDiffusion",
    "Diffusers": [
      "TextToImage",
      "ImageToImage",
      "ImageInpaintLegacy"
    ],
    "DeviceId": 0,
    "InterOpNumThreads": 0,
    "IntraOpNumThreads": 0,
    "ExecutionMode": "ORT_SEQUENTIAL",
    "ExecutionProvider": "DirectML",
    "ModelConfigurations": [
      {
        "Type": "Tokenizer",
        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
      },
      {
        "Type": "Unet",
        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\unet\\model.onnx"
      },
      {
        "Type": "TextEncoder",
        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\text_encoder\\model.onnx"
      },
      {
        "Type": "VaeEncoder",
        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_encoder\\model.onnx"
      },
      {
        "Type": "VaeDecoder",
        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_decoder\\model.onnx"
      }
    ]
  }
}
```
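
With this file alongside the application binary, no extra wiring is needed: `Host.CreateApplicationBuilder()` loads `appsettings.json` by default, so the registration call shown earlier picks up the `OnnxStackConfig` section. A minimal sketch repeating that registration:
```csharp
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;

var builder = Host.CreateApplicationBuilder();

// appsettings.json (including the OnnxStackConfig section) is loaded automatically by the host builder
builder.Services.AddOnnxStackStableDiffusion();

await builder.Build().RunAsync();
```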

### **Prompt**

## ONNX Model Download
You will need an ONNX-compatible model to use. Hugging Face is a great place to download Stable Diffusion models.

Download the [ONNX Stable Diffusion models from Hugging Face](https://huggingface.co/models?sort=downloads&search=Stable+Diffusion).

- [Stable Diffusion Models v1.4](https://huggingface.co/CompVis/stable-diffusion-v1-4/tree/onnx)
- [Stable Diffusion Models v1.5](https://huggingface.co/runwayml/stable-diffusion-v1-5/tree/onnx)

Once you have selected a model version repo, click `Files and Versions`, then select the `ONNX` branch. If there isn't an ONNX model branch available, use the `main` branch and convert it to ONNX. See the [ONNX conversion tutorial for PyTorch](https://learn.microsoft.com/windows/ai/windows-ml/tutorials/pytorch-convert-model) for more information.

Clone the model repo:
```text
git lfs install
git clone https://huggingface.co/runwayml/stable-diffusion-v1-5 -b onnx
```

Other Stable Diffusion models already converted to ONNX:

- [OpenJourney V4](https://huggingface.co/TheyCallMeHex/OpenJourney-V4-ONNX)
- [DreamLike PhotoReal 2.0](https://huggingface.co/TheyCallMeHex/DreamLike-PhotoReal-2.0-ONNX)
- [CyberPunk Anime Diffusion](https://huggingface.co/TheyCallMeHex/Cyberpunk-Anime-Diffusion-ONNX)
- [InkPunk Diffusion](https://huggingface.co/TheyCallMeHex/Inkpunk-Diffusion-ONNX)
- [Mo-Di Diffusion](https://huggingface.co/TheyCallMeHex/Mo-Di-Diffusion-ONNX)
- [epiCRealism](https://huggingface.co/TheyCallMeHex/epiCRealism-ONNX)
- [Comic Diffusion](https://huggingface.co/TheyCallMeHex/Comic-Diffusion-ONNX)
- [Redshift Diffusion](https://huggingface.co/TheyCallMeHex/Redshift-Diffusion-ONNX)


## Resources