diff --git a/AspNetCore.sln b/AspNetCore.sln
index b0dd006c7a9a..8948e0a1d377 100644
--- a/AspNetCore.sln
+++ b/AspNetCore.sln
@@ -1772,6 +1772,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MinimalFormSample", "src\An
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MvcFormSample", "src\Mvc\samples\MvcFormSample\MvcFormSample.csproj", "{055F86AA-FB37-40CC-B39E-C29CE7547BB7}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.OutputCaching.Microbenchmarks", "src\Middleware\OutputCaching\perf\Microbenchmarks\Microsoft.AspNetCore.OutputCaching.Microbenchmarks.csproj", "{137AD17B-066F-4ED4-80FA-8D21C7B76CA6}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -10645,6 +10647,22 @@ Global
{055F86AA-FB37-40CC-B39E-C29CE7547BB7}.Release|x64.Build.0 = Release|Any CPU
{055F86AA-FB37-40CC-B39E-C29CE7547BB7}.Release|x86.ActiveCfg = Release|Any CPU
{055F86AA-FB37-40CC-B39E-C29CE7547BB7}.Release|x86.Build.0 = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|arm64.ActiveCfg = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|arm64.Build.0 = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|x64.Build.0 = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Debug|x86.Build.0 = Debug|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|Any CPU.Build.0 = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|arm64.ActiveCfg = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|arm64.Build.0 = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|x64.ActiveCfg = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|x64.Build.0 = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|x86.ActiveCfg = Release|Any CPU
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -11519,6 +11537,7 @@ Global
{37FC77EA-AC44-4D08-B002-8EFF415C424A} = {64B2A28F-6D82-4F2B-B0BB-88DE5216DD2C}
{87D58D50-20D1-4091-88C5-8D88DCCC2DE3} = {6126DCE4-9692-4EE2-B240-C65743572995}
{055F86AA-FB37-40CC-B39E-C29CE7547BB7} = {B8825E86-B8EA-4666-B681-C443D027C95D}
+ {137AD17B-066F-4ED4-80FA-8D21C7B76CA6} = {AA5ABFBC-177C-421E-B743-005E0FD1248B}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {3E8720B3-DBDD-498C-B383-2CC32A054E8F}
diff --git a/src/Caching/StackExchangeRedis/src/Microsoft.Extensions.Caching.StackExchangeRedis.csproj b/src/Caching/StackExchangeRedis/src/Microsoft.Extensions.Caching.StackExchangeRedis.csproj
index b5a3d703a4ab..432cfd744c93 100644
--- a/src/Caching/StackExchangeRedis/src/Microsoft.Extensions.Caching.StackExchangeRedis.csproj
+++ b/src/Caching/StackExchangeRedis/src/Microsoft.Extensions.Caching.StackExchangeRedis.csproj
@@ -16,16 +16,20 @@
-
+
+
+
+
+
+
-
diff --git a/src/Caching/StackExchangeRedis/src/RedisOutputCacheStore.Log.cs b/src/Caching/StackExchangeRedis/src/RedisOutputCacheStore.Log.cs
new file mode 100644
index 000000000000..442963c74895
--- /dev/null
+++ b/src/Caching/StackExchangeRedis/src/RedisOutputCacheStore.Log.cs
@@ -0,0 +1,20 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+#if NET7_0_OR_GREATER // IOutputCacheStore only exists from net7
+
+using System;
+using Microsoft.Extensions.Logging;
+using StackExchange.Redis;
+
+namespace Microsoft.Extensions.Caching.StackExchangeRedis;
+
+internal partial class RedisOutputCacheStore
+{
+ [LoggerMessage(1, LogLevel.Warning, "Transient error occurred executing redis output-cache GC loop.", EventName = "RedisOutputCacheGCTransientError")]
+ internal static partial void RedisOutputCacheGCTransientFault(ILogger logger, Exception exception);
+
+ [LoggerMessage(2, LogLevel.Error, "Fatal error occurred executing redis output-cache GC loop.", EventName = "RedisOutputCacheGCFatalError")]
+ internal static partial void RedisOutputCacheGCFatalError(ILogger logger, Exception exception);
+}
+#endif
diff --git a/src/Caching/StackExchangeRedis/src/RedisOutputCacheStore.cs b/src/Caching/StackExchangeRedis/src/RedisOutputCacheStore.cs
index 83dc107adfb3..b8d5d55106cc 100644
--- a/src/Caching/StackExchangeRedis/src/RedisOutputCacheStore.cs
+++ b/src/Caching/StackExchangeRedis/src/RedisOutputCacheStore.cs
@@ -21,7 +21,7 @@
namespace Microsoft.Extensions.Caching.StackExchangeRedis;
-internal class RedisOutputCacheStore : IOutputCacheStore, IOutputCacheBufferStore, IDisposable
+internal partial class RedisOutputCacheStore : IOutputCacheStore, IOutputCacheBufferStore, IDisposable
{
private readonly RedisCacheOptions _options;
private readonly ILogger _logger;
@@ -114,14 +114,14 @@ private async Task RunGarbageCollectionLoopAsync()
catch (Exception ex)
{
// this sweep failed; log it
- _logger.LogDebug(ex, "Transient error occurred executing redis output-cache GC loop");
+ RedisOutputCacheGCTransientFault(_logger, ex);
}
}
}
catch (Exception ex)
{
// the entire loop is dead
- _logger.LogDebug(ex, "Fatal error occurred executing redis output-cache GC loop");
+ RedisOutputCacheGCFatalError(_logger, ex);
}
}
diff --git a/src/Caching/StackExchangeRedis/src/StackExchangeRedisCacheServiceCollectionExtensions.cs b/src/Caching/StackExchangeRedis/src/StackExchangeRedisCacheServiceCollectionExtensions.cs
index d6fca729bf20..a3c2fca998d5 100644
--- a/src/Caching/StackExchangeRedis/src/StackExchangeRedisCacheServiceCollectionExtensions.cs
+++ b/src/Caching/StackExchangeRedis/src/StackExchangeRedisCacheServiceCollectionExtensions.cs
@@ -51,6 +51,8 @@ public static IServiceCollection AddStackExchangeRedisOutputCache(this IServiceC
services.Configure(setupAction);
// replace here (Add vs TryAdd) is intentional and part of test conditions
+ // long-form name qualification is because of the #if conditional; we'd need a matching #if around
+ // a using directive, which is messy
services.AddSingleton();
return services;
diff --git a/src/Middleware/OutputCaching/OutputCaching.slnf b/src/Middleware/OutputCaching/OutputCaching.slnf
index 872e454585eb..f7c567553ab5 100644
--- a/src/Middleware/OutputCaching/OutputCaching.slnf
+++ b/src/Middleware/OutputCaching/OutputCaching.slnf
@@ -4,6 +4,7 @@
"projects": [
"src\\Caching\\StackExchangeRedis\\src\\Microsoft.Extensions.Caching.StackExchangeRedis.csproj",
"src\\Caching\\StackExchangeRedis\\test\\Microsoft.Extensions.Caching.StackExchangeRedis.Tests.csproj",
+ "src\\Middleware\\OutputCaching\\perf\\Microbenchmarks\\Microsoft.AspNetCore.OutputCaching.Microbenchmarks.csproj",
"src\\Middleware\\OutputCaching\\samples\\OutputCachingSample\\OutputCachingSample.csproj",
"src\\Middleware\\OutputCaching\\src\\Microsoft.AspNetCore.OutputCaching.csproj",
"src\\Middleware\\OutputCaching\\test\\Microsoft.AspNetCore.OutputCaching.Tests.csproj"
diff --git a/src/Middleware/OutputCaching/perf/Microbenchmarks/AssemblyInfo.cs b/src/Middleware/OutputCaching/perf/Microbenchmarks/AssemblyInfo.cs
new file mode 100644
index 000000000000..09f49228e9e6
--- /dev/null
+++ b/src/Middleware/OutputCaching/perf/Microbenchmarks/AssemblyInfo.cs
@@ -0,0 +1,4 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+[assembly: BenchmarkDotNet.Attributes.AspNetCoreBenchmark]
diff --git a/src/Middleware/OutputCaching/perf/Microbenchmarks/EndToEndBenchmarks.cs b/src/Middleware/OutputCaching/perf/Microbenchmarks/EndToEndBenchmarks.cs
new file mode 100644
index 000000000000..fcc4226fa84c
--- /dev/null
+++ b/src/Middleware/OutputCaching/perf/Microbenchmarks/EndToEndBenchmarks.cs
@@ -0,0 +1,286 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+#nullable enable
+
+using System.Buffers;
+using System.IO.Pipelines;
+using BenchmarkDotNet.Attributes;
+using BenchmarkDotNet.Configs;
+using Microsoft.AspNetCore.Http;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.Net.Http.Headers;
+
+namespace Microsoft.AspNetCore.OutputCaching.Microbenchmarks;
+
+[MemoryDiagnoser, GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory), CategoriesColumn]
+public class EndToEndBenchmarks
+{
+ [Params(10, 1000, (64 * 1024) + 17, (256 * 1024) + 17)]
+ public int PayloadLength { get; set; } = 1024; // default for simple runs
+
+ private byte[] _payloadOversized = Array.Empty();
+ private string Key = "";
+ private IOutputCacheStore _store = null!;
+
+ private static readonly OutputCacheOptions _options = new();
+ private static readonly Action _noop = () => { };
+
+ private static readonly HashSet _tags = new();
+ private static IHeaderDictionary _headers = null!;
+
+ private ReadOnlyMemory Payload => new(_payloadOversized, 0, PayloadLength);
+
+ [GlobalCleanup]
+ public void Cleanup()
+ {
+ var arr = _payloadOversized;
+ _payloadOversized = Array.Empty();
+ if (arr.Length != 0)
+ {
+ ArrayPool.Shared.Return(arr);
+ }
+ _store = null!;
+ _headers = null!;
+ }
+
+ [GlobalSetup]
+ public async Task InitAsync()
+ {
+ Key = Guid.NewGuid().ToString();
+ _store = new DummyStore(Key);
+ _payloadOversized = ArrayPool.Shared.Rent(PayloadLength);
+ Random.Shared.NextBytes(_payloadOversized);
+ // some random headers from ms.com
+ _headers = new HeaderDictionary
+ {
+ ContentLength = PayloadLength,
+ ["X-Rtag"] = "AEM_PROD_Marketing",
+ ["X-Vhost"] = "publish_microsoft_s",
+ };
+ _headers.ContentType = "text/html;charset=utf-8";
+ _headers.Vary = "Accept-Encoding";
+ _headers.XContentTypeOptions = "nosniff";
+ _headers.XFrameOptions = "SAMEORIGIN";
+ _headers.RequestId = Key;
+
+ // store, fetch, validate (for each impl)
+ await StreamSync();
+ await ReadAsync(true);
+
+ await StreamAsync();
+ await ReadAsync(true);
+
+ await WriterAsync();
+ await ReadAsync(true);
+ }
+
+ static void WriteInRandomChunks(ReadOnlySpan value, Stream destination)
+ {
+ var rand = Random.Shared;
+ while (!value.IsEmpty)
+ {
+ var bytes = Math.Min(rand.Next(4, 1024), value.Length);
+ destination.Write(value.Slice(0, bytes));
+ value = value.Slice(bytes);
+ }
+ destination.Flush();
+ }
+
+ static Task WriteInRandomChunks(ReadOnlyMemory source, PipeWriter destination, CancellationToken cancellationToken)
+ {
+ var value = source.Span;
+ var rand = Random.Shared;
+ while (!value.IsEmpty)
+ {
+ var bytes = Math.Min(rand.Next(4, 1024), value.Length);
+ var span = destination.GetSpan(bytes);
+ bytes = Math.Min(bytes, span.Length);
+ value.Slice(0, bytes).CopyTo(span);
+ destination.Advance(bytes);
+ value = value.Slice(bytes);
+ }
+ return destination.FlushAsync(cancellationToken).AsTask();
+ }
+
+ static async Task WriteInRandomChunksAsync(ReadOnlyMemory value, Stream destination, CancellationToken cancellationToken)
+ {
+ var rand = Random.Shared;
+ while (!value.IsEmpty)
+ {
+ var bytes = Math.Min(rand.Next(4, 1024), value.Length);
+ await destination.WriteAsync(value.Slice(0, bytes), cancellationToken);
+ value = value.Slice(bytes);
+ }
+ await destination.FlushAsync(cancellationToken);
+ }
+
+ [Benchmark(Description = "StreamSync"), BenchmarkCategory("Write")]
+ public async Task StreamSync()
+ {
+ ReadOnlySequence body;
+ using (var oc = new OutputCacheStream(Stream.Null, _options.MaximumBodySize, StreamUtilities.BodySegmentSize, _noop))
+ {
+ WriteInRandomChunks(Payload.Span, oc);
+ body = oc.GetCachedResponseBody();
+ }
+ var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK)
+ .CopyHeadersFrom(_headers);
+ entry.SetBody(body, recycleBuffers: true);
+ await OutputCacheEntryFormatter.StoreAsync(Key, entry, _tags, _options.DefaultExpirationTimeSpan, _store, NullLogger.Instance, CancellationToken.None);
+ entry.Dispose();
+ }
+
+ [Benchmark(Description = "StreamAsync"), BenchmarkCategory("Write")]
+ public async Task StreamAsync()
+ {
+ ReadOnlySequence body;
+ using (var oc = new OutputCacheStream(Stream.Null, _options.MaximumBodySize, StreamUtilities.BodySegmentSize, _noop))
+ {
+ await WriteInRandomChunksAsync(Payload, oc, CancellationToken.None);
+ body = oc.GetCachedResponseBody();
+ }
+ var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK)
+ .CopyHeadersFrom(_headers);
+ entry.SetBody(body, recycleBuffers: true);
+ await OutputCacheEntryFormatter.StoreAsync(Key, entry, _tags, _options.DefaultExpirationTimeSpan, _store, NullLogger.Instance, CancellationToken.None);
+ entry.Dispose();
+ }
+
+ [Benchmark(Description = "BodyWriter"), BenchmarkCategory("Write")]
+ public async Task WriterAsync()
+ {
+ ReadOnlySequence body;
+ using (var oc = new OutputCacheStream(Stream.Null, _options.MaximumBodySize, StreamUtilities.BodySegmentSize, _noop))
+ {
+ var pipe = PipeWriter.Create(oc, new StreamPipeWriterOptions(leaveOpen: true));
+ await WriteInRandomChunks(Payload, pipe, CancellationToken.None);
+ body = oc.GetCachedResponseBody();
+ }
+ var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK)
+ .CopyHeadersFrom(_headers);
+ entry.SetBody(body, recycleBuffers: true);
+ await OutputCacheEntryFormatter.StoreAsync(Key, entry, _tags, _options.DefaultExpirationTimeSpan, _store, NullLogger.Instance, CancellationToken.None);
+ entry.Dispose();
+ }
+
+ [Benchmark, BenchmarkCategory("Read")]
+ public Task ReadAsync() => ReadAsync(false);
+
+ private async Task ReadAsync(bool validate)
+ {
+ static void ThrowNotFound() => throw new KeyNotFoundException();
+
+ var entry = await OutputCacheEntryFormatter.GetAsync(Key, _store, CancellationToken.None);
+ if (validate)
+ {
+ Validate(entry!);
+ }
+ if (entry is null)
+ {
+ ThrowNotFound();
+ }
+ else
+ {
+ entry.Dispose();
+ }
+ }
+
+ private void Validate(OutputCacheEntry value)
+ {
+ ArgumentNullException.ThrowIfNull(value);
+ var body = value.Body;
+ if (body.Length != PayloadLength)
+ {
+ throw new InvalidOperationException("Invalid payload length");
+ }
+
+ if (body.IsSingleSegment)
+ {
+ if (!Payload.Span.SequenceEqual(body.FirstSpan))
+ {
+ throw new InvalidOperationException("Invalid payload");
+ }
+ }
+ else
+ {
+ var oversized = ArrayPool.Shared.Rent(PayloadLength);
+ value.Body.CopyTo(oversized);
+ if (!Payload.Span.SequenceEqual(new(oversized, 0, PayloadLength)))
+ {
+ throw new InvalidOperationException("Invalid payload");
+ }
+
+ ArrayPool.Shared.Return(oversized);
+ }
+
+ if (value.Headers.Length != _headers.Count - 2)
+ {
+ throw new InvalidOperationException("Incorrect header count");
+ }
+ foreach (var header in _headers)
+ {
+ if (header.Key == HeaderNames.ContentLength || header.Key == HeaderNames.RequestId)
+ {
+ // not stored
+ continue;
+ }
+ if (!value.TryFindHeader(header.Key, out var vals) || vals != header.Value)
+ {
+ throw new InvalidOperationException("Invalid header: " + header.Key);
+ }
+ }
+ }
+
+ sealed class DummyStore : IOutputCacheStore
+ {
+ private readonly string _key;
+ private byte[]? _payload;
+ public DummyStore(string key) => _key = key;
+
+ ValueTask IOutputCacheStore.EvictByTagAsync(string tag, CancellationToken cancellationToken) => default;
+
+ ValueTask IOutputCacheStore.GetAsync(string key, CancellationToken cancellationToken)
+ {
+ if (key != _key)
+ {
+ Throw();
+ }
+ return new(_payload);
+ }
+
+ ValueTask IOutputCacheStore.SetAsync(string key, byte[]? value, string[]? tags, TimeSpan validFor, CancellationToken cancellationToken)
+ {
+ if (key != _key)
+ {
+ Throw();
+ }
+ _payload = value;
+ return default;
+ }
+
+ static void Throw() => throw new InvalidOperationException("Incorrect key");
+ }
+
+ internal sealed class NullPipeWriter : PipeWriter, IDisposable
+ {
+ public void Dispose()
+ {
+ var arr = _buffer;
+ _buffer = null!;
+ if (arr is not null)
+ {
+ ArrayPool.Shared.Return(arr);
+ }
+ }
+ byte[] _buffer;
+ public NullPipeWriter(int size) => _buffer = ArrayPool.Shared.Rent(size);
+ public override void Advance(int bytes) { }
+ public override Span GetSpan(int sizeHint = 0) => _buffer;
+ public override Memory GetMemory(int sizeHint = 0) => _buffer;
+ public override void Complete(Exception? exception = null) { }
+ public override void CancelPendingFlush() { }
+ public override ValueTask CompleteAsync(Exception? exception = null) => default;
+ public override ValueTask FlushAsync(CancellationToken cancellationToken = default) => default;
+ }
+}
diff --git a/src/Middleware/OutputCaching/perf/Microbenchmarks/Microsoft.AspNetCore.OutputCaching.Microbenchmarks.csproj b/src/Middleware/OutputCaching/perf/Microbenchmarks/Microsoft.AspNetCore.OutputCaching.Microbenchmarks.csproj
new file mode 100644
index 000000000000..419f60a6c948
--- /dev/null
+++ b/src/Middleware/OutputCaching/perf/Microbenchmarks/Microsoft.AspNetCore.OutputCaching.Microbenchmarks.csproj
@@ -0,0 +1,19 @@
+
+
+
+ Exe
+ net8.0
+ enable
+ true
+ $(DefineConstants);IS_BENCHMARKS
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/Middleware/OutputCaching/src/CacheEntryHelpers.cs b/src/Middleware/OutputCaching/src/CacheEntryHelpers.cs
index e7fdcf1bcbf3..9ae706880a1d 100644
--- a/src/Middleware/OutputCaching/src/CacheEntryHelpers.cs
+++ b/src/Middleware/OutputCaching/src/CacheEntryHelpers.cs
@@ -20,19 +20,13 @@ internal static long EstimateCachedResponseSize(OutputCacheEntry cachedResponse)
long size = sizeof(int);
// Headers
- if (cachedResponse.Headers != null)
+ foreach (var item in cachedResponse.Headers.Span)
{
- foreach (var item in cachedResponse.Headers)
- {
- size += (item.Key.Length * sizeof(char)) + EstimateStringValuesSize(item.Value);
- }
+ size += (item.Name.Length * sizeof(char)) + EstimateStringValuesSize(item.Value);
}
// Body
- if (cachedResponse.Body != null)
- {
- size += cachedResponse.Body.Length;
- }
+ size += cachedResponse.Body.Length;
return size;
}
diff --git a/src/Middleware/OutputCaching/src/CachedResponseBody.cs b/src/Middleware/OutputCaching/src/CachedResponseBody.cs
deleted file mode 100644
index dd46a5ece715..000000000000
--- a/src/Middleware/OutputCaching/src/CachedResponseBody.cs
+++ /dev/null
@@ -1,53 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.IO.Pipelines;
-
-namespace Microsoft.AspNetCore.OutputCaching;
-
-///
-/// Represents a cached response body.
-///
-internal sealed class CachedResponseBody
-{
- ///
- /// Creates a new instance.
- ///
- /// The segments.
- /// The length.
- public CachedResponseBody(List segments, long length)
- {
- ArgumentNullException.ThrowIfNull(segments);
-
- Segments = segments;
- Length = length;
- }
-
- ///
- /// Gets the segments of the body.
- ///
- public List Segments { get; }
-
- ///
- /// Gets the length of the body.
- ///
- public long Length { get; }
-
- ///
- /// Copies the body to a .
- ///
- /// The destination
- /// The cancellation token.
- ///
- public async Task CopyToAsync(PipeWriter destination, CancellationToken cancellationToken)
- {
- ArgumentNullException.ThrowIfNull(destination);
-
- foreach (var segment in Segments)
- {
- cancellationToken.ThrowIfCancellationRequested();
-
- await destination.WriteAsync(segment, cancellationToken);
- }
- }
-}
diff --git a/src/Middleware/OutputCaching/src/FormatterBinaryReader.cs b/src/Middleware/OutputCaching/src/FormatterBinaryReader.cs
new file mode 100644
index 000000000000..000c25fedd4f
--- /dev/null
+++ b/src/Middleware/OutputCaching/src/FormatterBinaryReader.cs
@@ -0,0 +1,197 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Diagnostics.CodeAnalysis;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+using System.Text;
+
+namespace Microsoft.AspNetCore.OutputCaching;
+
+internal ref struct FormatterBinaryReader
+{
+ // this is effectively a cut-down re-implementation of BinaryReader
+ // from https://github.com/dotnet/runtime/blob/3689fbec921418e496962dc0ee252bdc9eafa3de/src/libraries/System.Private.CoreLib/src/System/IO/BinaryReader.cs
+ // and is byte-compatible; however, instead of working against a Stream, we work against a ReadOnlyMemory
+ //
+ // additionally, we add support for reading a string with length specified by the caller (rather than handled automatically),
+ // and in-place (zero-copy) BLOB reads
+
+ private readonly ReadOnlyMemory _original; // used to allow us to zero-copy chunks out of the payload
+ private readonly ref byte _root;
+ private readonly int _length;
+ private int _offset;
+
+ public bool IsEOF => _offset >= _length;
+
+ public FormatterBinaryReader(ReadOnlyMemory content)
+ {
+ _original = content;
+ _length = content.Length;
+ _root = ref MemoryMarshal.GetReference(content.Span);
+ }
+
+ public byte ReadByte()
+ {
+ if (IsEOF)
+ {
+ ThrowEndOfStream();
+ }
+ return Unsafe.Add(ref _root, _offset++);
+ }
+
+ public int Read7BitEncodedInt()
+ {
+ // Unlike writing, we can't delegate to the 64-bit read on
+ // 64-bit platforms. The reason for this is that we want to
+ // stop consuming bytes if we encounter an integer overflow.
+
+ uint result = 0;
+ byte byteReadJustNow;
+
+ // Read the integer 7 bits at a time. The high bit
+ // of the byte when on means to continue reading more bytes.
+ //
+ // There are two failure cases: we've read more than 5 bytes,
+ // or the fifth byte is about to cause integer overflow.
+ // This means that we can read the first 4 bytes without
+ // worrying about integer overflow.
+
+ const int MaxBytesWithoutOverflow = 4;
+ for (int shift = 0; shift < MaxBytesWithoutOverflow * 7; shift += 7)
+ {
+ // ReadByte handles end of stream cases for us.
+ byteReadJustNow = ReadByte();
+ result |= (byteReadJustNow & 0x7Fu) << shift;
+
+ if (byteReadJustNow <= 0x7Fu)
+ {
+ return (int)result; // early exit
+ }
+ }
+
+ // Read the 5th byte. Since we already read 28 bits,
+ // the value of this byte must fit within 4 bits (32 - 28),
+ // and it must not have the high bit set.
+
+ byteReadJustNow = ReadByte();
+ if (byteReadJustNow > 0b_1111u)
+ {
+ ThrowOverflowException();
+ }
+
+ result |= (uint)byteReadJustNow << (MaxBytesWithoutOverflow * 7);
+ return (int)result;
+ }
+
+ public long Read7BitEncodedInt64()
+ {
+ ulong result = 0;
+ byte byteReadJustNow;
+
+ // Read the integer 7 bits at a time. The high bit
+ // of the byte when on means to continue reading more bytes.
+ //
+ // There are two failure cases: we've read more than 10 bytes,
+ // or the tenth byte is about to cause integer overflow.
+ // This means that we can read the first 9 bytes without
+ // worrying about integer overflow.
+
+ const int MaxBytesWithoutOverflow = 9;
+ for (int shift = 0; shift < MaxBytesWithoutOverflow * 7; shift += 7)
+ {
+ // ReadByte handles end of stream cases for us.
+ byteReadJustNow = ReadByte();
+ result |= (byteReadJustNow & 0x7Ful) << shift;
+
+ if (byteReadJustNow <= 0x7Fu)
+ {
+ return (long)result; // early exit
+ }
+ }
+
+ // Read the 10th byte. Since we already read 63 bits,
+ // the value of this byte must fit within 1 bit (64 - 63),
+ // and it must not have the high bit set.
+
+ byteReadJustNow = ReadByte();
+ if (byteReadJustNow > 0b_1u)
+ {
+ ThrowOverflowException();
+ }
+
+ result |= (ulong)byteReadJustNow << (MaxBytesWithoutOverflow * 7);
+ return (long)result;
+ }
+
+ public string ReadString() => ReadString(Read7BitEncodedInt());
+
+ public void SkipString() => Skip(Read7BitEncodedInt());
+
+ public string ReadString(int bytes)
+ {
+ ArgumentOutOfRangeException.ThrowIfNegative(bytes);
+
+ if (_offset > _length - bytes)
+ {
+ ThrowEndOfStream();
+ }
+ if (bytes == 0)
+ {
+ return "";
+ }
+ var s = Encoding.UTF8.GetString(MemoryMarshal.CreateReadOnlySpan(ref Unsafe.Add(ref _root, _offset), bytes));
+ _offset += bytes;
+ return s;
+ }
+
+ public void Skip(int bytes)
+ {
+ ArgumentOutOfRangeException.ThrowIfNegative(bytes);
+ if (_offset > _length - bytes)
+ {
+ ThrowEndOfStream();
+ }
+ _offset += bytes;
+ }
+
+ public ReadOnlySpan ReadBytesSpan(int count)
+ {
+ ArgumentOutOfRangeException.ThrowIfNegative(count);
+
+ if (_offset > _length - count)
+ {
+ ThrowEndOfStream();
+ }
+ if (count == 0)
+ {
+ return default;
+ }
+ var result = MemoryMarshal.CreateReadOnlySpan(ref Unsafe.Add(ref _root, _offset), count);
+ _offset += count;
+ return result;
+ }
+
+ public ReadOnlyMemory ReadBytesMemory(int count)
+ {
+ ArgumentOutOfRangeException.ThrowIfNegative(count);
+
+ if (_offset > _length - count)
+ {
+ ThrowEndOfStream();
+ }
+ if (count == 0)
+ {
+ return default;
+ }
+ var result = _original.Slice(_offset, count);
+ _offset += count;
+ return result;
+ }
+
+ [DoesNotReturn]
+ private static void ThrowEndOfStream() => throw new EndOfStreamException();
+
+ [DoesNotReturn]
+ private static void ThrowOverflowException() => throw new OverflowException();
+}
diff --git a/src/Middleware/OutputCaching/src/FormatterBinaryWriter.cs b/src/Middleware/OutputCaching/src/FormatterBinaryWriter.cs
new file mode 100644
index 000000000000..c85df1d87427
--- /dev/null
+++ b/src/Middleware/OutputCaching/src/FormatterBinaryWriter.cs
@@ -0,0 +1,207 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Diagnostics;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+using System.Text;
+
+namespace Microsoft.AspNetCore.OutputCaching;
+
+internal ref struct FormatterBinaryWriter
+{
+ // this is effectively a cut-down re-implementation of BinaryWriter
+ // from https://github.com/dotnet/runtime/blob/3689fbec921418e496962dc0ee252bdc9eafa3de/src/libraries/System.Private.CoreLib/src/System/IO/BinaryWriter.cs
+ // and is byte-compatible; however, instead of working against a Stream, we work against a IBufferWriter
+ //
+ // note it also has APIs for writing raw BLOBs
+
+ private readonly IBufferWriter target;
+ private int offset, length;
+ private ref byte root;
+
+ public FormatterBinaryWriter(IBufferWriter target)
+ {
+ ArgumentNullException.ThrowIfNull(target);
+ this.target = target;
+ root = ref Unsafe.NullRef(); // no buffer initially
+ offset = length = 0;
+ DebugAssertValid();
+ }
+
+ private Span AvailableBuffer
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get
+ {
+ DebugAssertValid();
+ return MemoryMarshal.CreateSpan(ref Unsafe.Add(ref root, offset), length - offset);
+ }
+ }
+
+ [Conditional("DEBUG")]
+ private void DebugAssertValid()
+ {
+ Debug.Assert(target is not null);
+ if (Unsafe.IsNullRef(ref root))
+ {
+ // no buffer; expect all zeros
+ Debug.Assert(length == 0 && offset == 0);
+ }
+ else
+ {
+ // have buffer; expect valid offset and positive length
+ Debug.Assert(offset >= 0 && offset <= length);
+ Debug.Assert(length > 0);
+ }
+
+ }
+
+ // Writes a byte to this stream. The current position of the stream is
+ // advanced by one.
+ //
+ public void Write(byte value)
+ {
+ if (offset < length)
+ {
+ Unsafe.Add(ref root, offset++) = value;
+ }
+ else
+ {
+ SlowWrite(value);
+ }
+ DebugAssertValid();
+ }
+
+ public void Write(string value) => Write(value, 0);
+
+ internal void Write(string value, int lengthShift)
+ {
+ ArgumentNullException.ThrowIfNull(value);
+
+ if (value.Length == 0)
+ {
+ Write(0); // length prefix
+ return;
+ }
+
+ var bytes = Encoding.UTF8.GetByteCount(value);
+ Write7BitEncodedInt(bytes << lengthShift); // length prefix
+ if (bytes <= length - offset)
+ {
+ var actual = Encoding.UTF8.GetBytes(value, AvailableBuffer);
+ Debug.Assert(actual == bytes);
+ offset += bytes;
+ }
+ else
+ {
+ Flush();
+ // get the encoding to do the heavy lifting directly
+ var actual = Encoding.UTF8.GetBytes(value, target);
+ Debug.Assert(actual == bytes);
+ }
+ DebugAssertValid();
+ }
+
+ private void RequestNewBuffer()
+ {
+ Flush();
+ var span = target.GetSpan(1024); // fairly arbitrary non-trivial buffer; we can explore larger if useful
+ if (span.IsEmpty)
+ {
+ Throw();
+ }
+ offset = 0;
+ length = span.Length;
+ root = ref MemoryMarshal.GetReference(span);
+
+ DebugAssertValid();
+ static void Throw() => throw new InvalidOperationException("Unable to acquire non-empty write buffer");
+ }
+
+ public void Flush() // commits the current buffer and leave in a buffer-free state
+ {
+ if (!Unsafe.IsNullRef(ref root))
+ {
+ target.Advance(offset);
+ length = offset = 0;
+ root = ref Unsafe.NullRef();
+ }
+ DebugAssertValid();
+ }
+
+ private void SlowWrite(byte value)
+ {
+ RequestNewBuffer();
+ Unsafe.Add(ref root, offset++) = value;
+ }
+
+ public void Write7BitEncodedInt(int value)
+ {
+ uint uValue = (uint)value;
+
+ // Write out an int 7 bits at a time. The high bit of the byte,
+ // when on, tells reader to continue reading more bytes.
+ //
+ // Using the constants 0x7F and ~0x7F below offers smaller
+ // codegen than using the constant 0x80.
+
+ while (uValue > 0x7Fu)
+ {
+ Write((byte)(uValue | ~0x7Fu));
+ uValue >>= 7;
+ }
+
+ Write((byte)uValue);
+ }
+
+ public void Write7BitEncodedInt64(long value)
+ {
+ ulong uValue = (ulong)value;
+
+ // Write out an int 7 bits at a time. The high bit of the byte,
+ // when on, tells reader to continue reading more bytes.
+ //
+ // Using the constants 0x7F and ~0x7F below offers smaller
+ // codegen than using the constant 0x80.
+
+ while (uValue > 0x7Fu)
+ {
+ Write((byte)((uint)uValue | ~0x7Fu));
+ uValue >>= 7;
+ }
+
+ Write((byte)uValue);
+ }
+
+ public void WriteRaw(scoped ReadOnlySpan value)
+ {
+ if (value.IsEmpty)
+ { } // nothing to do
+ else if ((offset + value.Length) <= length)
+ {
+ value.CopyTo(AvailableBuffer);
+ offset += value.Length;
+ }
+ else
+ {
+ SlowWriteRaw(value);
+ }
+ DebugAssertValid();
+ }
+
+ private void SlowWriteRaw(scoped ReadOnlySpan value)
+ {
+ do
+ {
+ RequestNewBuffer();
+ var available = AvailableBuffer;
+ var toWrite = Math.Min(value.Length, available.Length);
+ value.Slice(start: 0, length: toWrite).CopyTo(available);
+ offset += toWrite;
+ value = value.Slice(start: toWrite);
+ }
+ while (!value.IsEmpty);
+ }
+}
diff --git a/src/Middleware/OutputCaching/src/IOutputCacheBufferStore.cs b/src/Middleware/OutputCaching/src/IOutputCacheBufferStore.cs
new file mode 100644
index 000000000000..a1cd3426fb23
--- /dev/null
+++ b/src/Middleware/OutputCaching/src/IOutputCacheBufferStore.cs
@@ -0,0 +1,33 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.IO.Pipelines;
+
+namespace Microsoft.AspNetCore.OutputCaching;
+
+///
+/// Represents a store for cached responses that uses a <see cref="System.IO.Pipelines.PipeWriter"/> as the target.
+///
+public interface IOutputCacheBufferStore : IOutputCacheStore
+{
+ ///
+ /// Gets the cached response for the given key, if it exists.
+ /// If no cached response exists for the given key, null is returned.
+ ///
+ /// The cache key to look up.
+ /// The location to which the value should be written.
+ /// Indicates that the operation should be cancelled.
+ /// True if the response cache entry exists; otherwise False.
+ ValueTask TryGetAsync(string key, PipeWriter destination, CancellationToken cancellationToken);
+
+ ///
+ /// Stores the given response in the response cache.
+ ///
+ /// The cache key to store the response under.
+ /// The response cache entry to store; this value is only defined for the duration of the method, and should not be stored without making a copy.
+ /// The tags associated with the cache entry to store.
+ /// The amount of time the entry will be kept in the cache before expiring, relative to now.
+ /// Indicates that the operation should be cancelled.
+ ValueTask SetAsync(string key, ReadOnlySequence value, ReadOnlyMemory tags, TimeSpan validFor, CancellationToken cancellationToken);
+}
diff --git a/src/Middleware/OutputCaching/src/IOutputCacheStore.cs b/src/Middleware/OutputCaching/src/IOutputCacheStore.cs
index 9b9e0738cb4b..ffba017ef6f9 100644
--- a/src/Middleware/OutputCaching/src/IOutputCacheStore.cs
+++ b/src/Middleware/OutputCaching/src/IOutputCacheStore.cs
@@ -1,9 +1,6 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using System.Buffers;
-using System.IO.Pipelines;
-
namespace Microsoft.AspNetCore.OutputCaching;
///
@@ -37,29 +34,3 @@ public interface IOutputCacheStore
/// Indicates that the operation should be cancelled.
ValueTask SetAsync(string key, byte[] value, string[]? tags, TimeSpan validFor, CancellationToken cancellationToken);
}
-
-/// <summary>
-/// Represents a store for cached responses that uses a <see cref="PipeWriter"/> as the target.
-/// </summary>
-public interface IOutputCacheBufferStore : IOutputCacheStore
-{
- ///
- /// Gets the cached response for the given key, if it exists.
- /// If no cached response exists for the given key, null is returned.
- ///
- /// The cache key to look up.
- /// The location to which the value should be written.
- /// Indicates that the operation should be cancelled.
- /// True if the response cache entry if it exists; otherwise False.
-    ValueTask<bool> TryGetAsync(string key, PipeWriter destination, CancellationToken cancellationToken);
-
- ///
- /// Stores the given response in the response cache.
- ///
- /// The cache key to store the response under.
- /// The response cache entry to store; this value is only defined for the duration of the method, and should not be stored without making a copy.
- /// The tags associated with the cache entry to store.
- /// The amount of time the entry will be kept in the cache before expiring, relative to now.
- /// Indicates that the operation should be cancelled.
-    ValueTask SetAsync(string key, ReadOnlySequence<byte> value, ReadOnlyMemory<string> tags, TimeSpan validFor, CancellationToken cancellationToken);
-}
diff --git a/src/Middleware/OutputCaching/src/Microsoft.AspNetCore.OutputCaching.csproj b/src/Middleware/OutputCaching/src/Microsoft.AspNetCore.OutputCaching.csproj
index c5c29d01ebad..a4af1d73757d 100644
--- a/src/Middleware/OutputCaching/src/Microsoft.AspNetCore.OutputCaching.csproj
+++ b/src/Middleware/OutputCaching/src/Microsoft.AspNetCore.OutputCaching.csproj
@@ -10,6 +10,7 @@
+
@@ -21,7 +22,6 @@
-
diff --git a/src/Middleware/OutputCaching/src/OutputCacheContext.cs b/src/Middleware/OutputCaching/src/OutputCacheContext.cs
index be51451ec169..5360e2adc067 100644
--- a/src/Middleware/OutputCaching/src/OutputCacheContext.cs
+++ b/src/Middleware/OutputCaching/src/OutputCacheContext.cs
@@ -67,7 +67,14 @@ public OutputCacheContext()
internal TimeSpan CachedEntryAge { get; set; }
- internal OutputCacheEntry CachedResponse { get; set; } = default!;
+ internal OutputCacheEntry? CachedResponse { get; set; }
+
+ internal void ReleaseCachedResponse()
+ {
+ var tmp = CachedResponse;
+ CachedResponse = null;
+ tmp?.Dispose();
+ }
internal bool ResponseStarted { get; set; }
diff --git a/src/Middleware/OutputCaching/src/OutputCacheEntry.cs b/src/Middleware/OutputCaching/src/OutputCacheEntry.cs
index 7a9124ac79c5..1de8ccbb0e81 100644
--- a/src/Middleware/OutputCaching/src/OutputCacheEntry.cs
+++ b/src/Middleware/OutputCaching/src/OutputCacheEntry.cs
@@ -1,34 +1,145 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
+using System.Buffers;
+using System.IO.Pipelines;
+using System.Runtime.InteropServices;
using Microsoft.AspNetCore.Http;
+using Microsoft.Extensions.Primitives;
+using Microsoft.Net.Http.Headers;
namespace Microsoft.AspNetCore.OutputCaching;
-internal sealed class OutputCacheEntry
+internal sealed class OutputCacheEntry : IDisposable
{
+ public OutputCacheEntry(DateTimeOffset created, int statusCode)
+ {
+ Created = created;
+ StatusCode = statusCode;
+ }
+
+ private bool _recycleBuffers; // does this instance own the memory behind the segments?
+
+ public StringValues FindHeader(string key)
+ {
+ TryFindHeader(key, out var value);
+ return value;
+ }
+
+ public bool TryFindHeader(string key, out StringValues values)
+ {
+ foreach (var header in Headers.Span)
+ {
+ if (string.Equals(key, header.Name, StringComparison.OrdinalIgnoreCase))
+ {
+ values = header.Value;
+ return true;
+ }
+ }
+ values = StringValues.Empty;
+ return false;
+ }
+
///
/// Gets the created date and time of the cache entry.
///
- public DateTimeOffset Created { get; set; }
+ public DateTimeOffset Created { get; }
///
/// Gets the status code of the cache entry.
///
- public int StatusCode { get; set; }
+ public int StatusCode { get; }
///
/// Gets the headers of the cache entry.
///
- public HeaderDictionary? Headers { get; set; }
+ public ReadOnlyMemory<(string Name, StringValues Value)> Headers { get; private set; }
+
+ // this is intentionally not an internal setter to make it clear that this should not be
+ // used from most scenarios; this should consider buffer reuse - you *probably* want CopyFrom
+ internal void SetHeaders(ReadOnlyMemory<(string Name, StringValues Value)> value) => Headers = value;
///
/// Gets the body of the cache entry.
///
- public CachedResponseBody? Body { get; set; }
+    public ReadOnlySequence<byte> Body { get; private set; }
- ///
- /// Gets the tags of the cache entry.
- ///
-    public string[] Tags { get; set; } = Array.Empty<string>();
+ // this is intentionally not an internal setter to make it clear that this should not be
+ // used from most scenarios; this should consider buffer reuse - you *probably* want CopyFrom
+    internal void SetBody(ReadOnlySequence<byte> value, bool recycleBuffers)
+ {
+ Body = value;
+ _recycleBuffers = recycleBuffers;
+ }
+
+ public void Dispose()
+ {
+ var headers = Headers;
+ var body = Body;
+ Headers = default;
+ Body = default;
+ Recycle(headers);
+ RecyclableReadOnlySequenceSegment.RecycleChain(body, _recycleBuffers);
+ // ^^ note that this only recycles the chain, not the actual buffers
+ }
+
+    private static void Recycle<T>(ReadOnlyMemory<T> value)
+    {
+        if (MemoryMarshal.TryGetArray(value, out var segment) && segment.Array is { Length: > 0 })
+        {
+            ArrayPool<T>.Shared.Return(segment.Array);
+        }
+    }
+
+    internal OutputCacheEntry CreateBodyFrom(IList<byte[]> segments) // mainly used from tests
+ {
+ // only expected in create path; don't reset/recycle existing
+ Body = RecyclableReadOnlySequenceSegment.CreateSequence(segments);
+ return this;
+ }
+
+ internal OutputCacheEntry CopyHeadersFrom(IHeaderDictionary headers)
+ {
+ // only expected in create path; don't reset/recycle existing
+ if (headers is not null)
+ {
+ var count = headers.Count;
+ var index = 0;
+ if (count != 0)
+ {
+ var arr = ArrayPool<(string, StringValues)>.Shared.Rent(count);
+ foreach (var header in headers)
+ {
+ if (OutputCacheEntryFormatter.ShouldStoreHeader(header.Key))
+ {
+ arr[index++] = (header.Key, header.Value);
+ }
+ }
+ if (index == 0) // only ignored headers
+ {
+ ArrayPool<(string, StringValues)>.Shared.Return(arr);
+ }
+ else
+ {
+ Headers = new(arr, 0, index);
+ }
+ }
+ }
+ return this;
+ }
+
+ public void CopyHeadersTo(IHeaderDictionary headers)
+ {
+ if (!TryFindHeader(HeaderNames.TransferEncoding, out _))
+ {
+ headers.ContentLength = Body.Length;
+ }
+ foreach (var header in Headers.Span)
+ {
+ headers[header.Name] = header.Value;
+ }
+ }
+
+ public ValueTask CopyToAsync(PipeWriter destination, CancellationToken cancellationToken)
+ => RecyclableReadOnlySequenceSegment.CopyToAsync(Body, destination, cancellationToken);
}
diff --git a/src/Middleware/OutputCaching/src/OutputCacheEntryFormatter.cs b/src/Middleware/OutputCaching/src/OutputCacheEntryFormatter.cs
index 38b11a9008f6..ee2b1052f10f 100644
--- a/src/Middleware/OutputCaching/src/OutputCacheEntryFormatter.cs
+++ b/src/Middleware/OutputCaching/src/OutputCacheEntryFormatter.cs
@@ -2,10 +2,14 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System.Buffers;
+using System.Collections.Frozen;
+using System.Diagnostics;
using System.Linq;
+using System.Runtime.CompilerServices;
using System.Text;
-using Microsoft.AspNetCore.OutputCaching.Serialization;
using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Primitives;
+using Microsoft.Net.Http.Headers;
namespace Microsoft.AspNetCore.OutputCaching;
///
@@ -13,7 +17,11 @@ namespace Microsoft.AspNetCore.OutputCaching;
///
internal static class OutputCacheEntryFormatter
{
- private const byte SerializationRevision = 1;
+ private enum SerializationRevision
+ {
+ V1_Original = 1,
+ V2_OriginalWithCommonHeaders = 2,
+ }
     public static async ValueTask<OutputCacheEntry?> GetAsync(string key, IOutputCacheStore store, CancellationToken cancellationToken)
{
@@ -21,81 +29,38 @@ internal static class OutputCacheEntryFormatter
var content = await store.GetAsync(key, cancellationToken);
- if (content == null)
- {
- return null;
- }
-
- var formatter = Deserialize(new MemoryStream(content));
-
- if (formatter == null)
+ if (content is null)
{
return null;
}
- var outputCacheEntry = new OutputCacheEntry
- {
- StatusCode = formatter.StatusCode,
- Created = formatter.Created,
- Tags = formatter.Tags,
- Headers = new(),
- Body = new CachedResponseBody(formatter.Body, formatter.Body.Sum(x => x.Length))
- };
-
- if (formatter.Headers != null)
- {
- foreach (var header in formatter.Headers)
- {
- outputCacheEntry.Headers.TryAdd(header.Key, header.Value);
- }
- }
-
- return outputCacheEntry;
+ return Deserialize(content);
}
- public static async ValueTask StoreAsync(string key, OutputCacheEntry value, TimeSpan duration, IOutputCacheStore store, ILogger logger, CancellationToken cancellationToken)
+    public static async ValueTask StoreAsync(string key, OutputCacheEntry value, HashSet<string>? tags, TimeSpan duration, IOutputCacheStore store, ILogger logger, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(value);
ArgumentNullException.ThrowIfNull(value.Body);
ArgumentNullException.ThrowIfNull(value.Headers);
- var formatterEntry = new FormatterEntry
- {
- StatusCode = value.StatusCode,
- Created = value.Created,
- Tags = value.Tags,
- Body = value.Body.Segments
- };
-
- if (value.Headers != null)
- {
- formatterEntry.Headers = new();
- foreach (var header in value.Headers)
- {
- formatterEntry.Headers.TryAdd(header.Key, header.Value.ToArray());
- }
- }
+        var buffer = new RecyclableArrayBufferWriter<byte>();
+ Serialize(buffer, value);
- using var bufferStream = new MemoryStream();
-
- Serialize(bufferStream, formatterEntry);
-
- if (!bufferStream.TryGetBuffer(out var segment))
- {
- segment = bufferStream.ToArray();
- }
-
-        var payload = new ReadOnlySequence<byte>(segment.Array!, segment.Offset, segment.Count);
try
{
if (store is IOutputCacheBufferStore bufferStore)
{
- await bufferStore.SetAsync(key, payload, value.Tags, duration, cancellationToken);
+ await bufferStore.SetAsync(key, new(buffer.GetMemory()), CopyToLeasedMemory(tags, out var lease), duration, cancellationToken);
+ if (lease is not null)
+ {
+                ArrayPool<string>.Shared.Return(lease);
+ }
}
else
{
// legacy API/in-proc: create an isolated right-sized byte[] for the payload
- await store.SetAsync(key, payload.ToArray(), value.Tags, duration, cancellationToken);
+                string[] tagsArr = tags is { Count: > 0 } ? tags.ToArray() : Array.Empty<string>();
+ await store.SetAsync(key, buffer.ToArray(), tagsArr, duration, cancellationToken);
}
}
catch (OperationCanceledException)
@@ -106,6 +71,23 @@ public static async ValueTask StoreAsync(string key, OutputCacheEntry value, Tim
{
logger.UnableToWriteToOutputCache(ex);
}
+ buffer.Dispose(); // this is intentionally not using "using"; only recycle on success, to avoid async code accessing shared buffers (esp. in cancellation)
+
+        static ReadOnlyMemory<string> CopyToLeasedMemory(HashSet<string>? tags, out string[]? lease)
+ {
+ if (tags is null || tags.Count == 0)
+ {
+ lease = null;
+ return default;
+ }
+ int index = 0;
+            lease = ArrayPool<string>.Shared.Rent(tags.Count);
+ foreach (var tag in tags)
+ {
+ lease[index++] = tag;
+ }
+            return new ReadOnlyMemory<string>(lease, 0, index);
+ }
}
// Format:
@@ -136,13 +118,13 @@ public static async ValueTask StoreAsync(string key, OutputCacheEntry value, Tim
// data byte length: 7-bit encoded int
// UTF-8 encoded byte[]
- private static void Serialize(Stream output, FormatterEntry entry)
+    private static void Serialize(IBufferWriter<byte> output, OutputCacheEntry entry)
{
- using var writer = new BinaryWriter(output);
+ var writer = new FormatterBinaryWriter(output);
// Serialization revision:
// 7-bit encoded int
- writer.Write7BitEncodedInt(SerializationRevision);
+ writer.Write7BitEncodedInt((int)SerializationRevision.V2_OriginalWithCommonHeaders);
// Creation date:
// Ticks: 7-bit encoded long
@@ -158,84 +140,107 @@ private static void Serialize(Stream output, FormatterEntry entry)
// Headers:
// Headers count: 7-bit encoded int
- writer.Write7BitEncodedInt(entry.Headers.Count);
+ writer.Write7BitEncodedInt(entry.Headers.Length);
// For each header:
// key name byte length: 7-bit encoded int
// UTF-8 encoded key name byte[]
- foreach (var header in entry.Headers)
+ foreach (var header in entry.Headers.Span)
{
- writer.Write(header.Key);
+ WriteCommonHeader(ref writer, header.Name);
// Values count: 7-bit encoded int
-
- if (header.Value == null)
- {
- writer.Write7BitEncodedInt(0);
- continue;
- }
- else
- {
- writer.Write7BitEncodedInt(header.Value.Length);
- }
+ var count = header.Value.Count;
+ writer.Write7BitEncodedInt(count);
// For each header value:
// data byte length: 7-bit encoded int
// UTF-8 encoded byte[]
-
- foreach (var value in header.Value)
+ for (var i = 0; i < count; i++)
{
- writer.Write(value ?? "");
+ WriteCommonHeader(ref writer, header.Value[i]);
}
}
// Body:
- // Segments count: 7-bit encoded int
- // For each segment:
- // data byte length: 7-bit encoded int
+ // Bytes count: 7-bit encoded int
// data byte[]
- writer.Write7BitEncodedInt(entry.Body.Count);
-
- foreach (var segment in entry.Body)
+ var body = entry.Body;
+ if (body.IsEmpty)
+ {
+ writer.Write((byte)0);
+ }
+ else if (body.IsSingleSegment)
{
- writer.Write7BitEncodedInt(segment.Length);
- writer.Write(segment);
+ var span = body.FirstSpan;
+ writer.Write7BitEncodedInt(span.Length);
+ writer.WriteRaw(span);
+ }
+ else
+ {
+ writer.Write7BitEncodedInt(checked((int)body.Length));
+ foreach (var segment in body)
+ {
+ writer.WriteRaw(segment.Span);
+ }
}
- // Tags:
- // Tags count: 7-bit encoded int
- // For each tag:
- // data byte length: 7-bit encoded int
- // UTF-8 encoded byte[]
+ writer.Flush();
+ }
- writer.Write7BitEncodedInt(entry.Tags.Length);
+ static void WriteCommonHeader(ref FormatterBinaryWriter writer, string? value)
+ {
+ if (string.IsNullOrEmpty(value))
+ {
+ writer.Write((byte)0);
+ }
+ else
+ {
+ if (CommonHeadersLookup.TryGetValue(value, out int known))
+ {
+ writer.Write7BitEncodedInt((known << 1) | 1);
+ }
+ else
+ {
+ // use the length-prefixed UTF8 write in FormatterBinaryWriter,
+ // but with a left-shift applied
+ writer.Write(value, lengthShift: 1);
+ }
+ }
+ }
- foreach (var tag in entry.Tags)
+ private static bool CanParseRevision(SerializationRevision revision, out bool useCommonHeaders)
+ {
+ switch (revision)
{
- writer.Write(tag ?? "");
+ case SerializationRevision.V1_Original: // we don't actively expect this much, since only in-proc back-end was shipped
+ useCommonHeaders = false;
+ return true;
+ case SerializationRevision.V2_OriginalWithCommonHeaders:
+ useCommonHeaders = true;
+ return true;
+ default:
+ // In future versions, also support the previous revision format.
+ useCommonHeaders = default;
+ return false;
}
}
- private static FormatterEntry? Deserialize(Stream content)
+    internal static OutputCacheEntry? Deserialize(ReadOnlyMemory<byte> content)
{
- using var reader = new BinaryReader(content);
+ var reader = new FormatterBinaryReader(content);
// Serialization revision:
// 7-bit encoded int
- var revision = reader.Read7BitEncodedInt();
-
- if (revision != SerializationRevision)
+ var revision = (SerializationRevision)reader.Read7BitEncodedInt();
+ if (!CanParseRevision(revision, out var useCommonHeaders))
{
- // In future versions, also support the previous revision format.
-
return null;
}
- var result = new FormatterEntry();
-
// Creation date:
// Ticks: 7-bit encoded long
// Offset.TotalMinutes: 7-bit encoded long
@@ -243,12 +248,14 @@ private static void Serialize(Stream output, FormatterEntry entry)
var ticks = reader.Read7BitEncodedInt64();
var offsetMinutes = reader.Read7BitEncodedInt64();
- result.Created = new DateTimeOffset(ticks, TimeSpan.FromMinutes(offsetMinutes));
+ var created = new DateTimeOffset(ticks, TimeSpan.FromMinutes(offsetMinutes));
// Status code:
// 7-bit encoded int
- result.StatusCode = reader.Read7BitEncodedInt();
+ var statusCode = reader.Read7BitEncodedInt();
+
+ var result = new OutputCacheEntry(created, statusCode);
// Headers:
// Headers count: 7-bit encoded int
@@ -259,71 +266,263 @@ private static void Serialize(Stream output, FormatterEntry entry)
// key name byte length: 7-bit encoded int
// UTF-8 encoded key name byte[]
// Values count: 7-bit encoded int
+ if (headersCount > 0)
+ {
+ var headerArr = ArrayPool<(string Name, StringValues Values)>.Shared.Rent(headersCount);
- result.Headers = new Dictionary(headersCount);
+ for (var i = 0; i < headersCount; i++)
+ {
+ var key = useCommonHeaders ? ReadCommonHeader(ref reader) : reader.ReadString();
+ StringValues value;
+ var valuesCount = reader.Read7BitEncodedInt();
+ // For each header value:
+ // data byte length: 7-bit encoded int
+ // UTF-8 encoded byte[]
+ switch (valuesCount)
+ {
+ case < 0:
+ throw new InvalidOperationException();
+ case 0:
+ value = StringValues.Empty;
+ break;
+ case 1:
+ value = new(useCommonHeaders ? ReadCommonHeader(ref reader) : reader.ReadString());
+ break;
+ default:
+ var values = new string[valuesCount];
+
+ for (var j = 0; j < valuesCount; j++)
+ {
+ values[j] = useCommonHeaders ? ReadCommonHeader(ref reader) : reader.ReadString();
+ }
+ value = new(values);
+ break;
+ }
+ headerArr[i] = (key, value);
+ }
+ result.SetHeaders(new ReadOnlyMemory<(string Name, StringValues Values)>(headerArr, 0, headersCount));
+ }
- for (var i = 0; i < headersCount; i++)
+ if (revision == SerializationRevision.V1_Original)
{
- var key = reader.ReadString();
+ // Body:
+ // Segments count: 7-bit encoded int
- var valuesCount = reader.Read7BitEncodedInt();
+ var segmentsCount = reader.Read7BitEncodedInt();
- // For each header value:
- // data byte length: 7-bit encoded int
- // UTF-8 encoded byte[]
+ // For each segment:
+ // data byte length: 7-bit encoded int
+ // data byte[]
- var values = new string[valuesCount];
+ switch (segmentsCount)
+ {
+ case 0:
+ // nothing to do
+ break;
+ case 1:
+                result.SetBody(new ReadOnlySequence<byte>(ReadSegment(ref reader)), recycleBuffers: false); // we're reusing the live payload buffers
+ break;
+ case < 0:
+ throw new InvalidOperationException();
+ default:
+ RecyclableReadOnlySequenceSegment first = RecyclableReadOnlySequenceSegment.Create(ReadSegment(ref reader), null), last = first;
+ for (int i = 1; i < segmentsCount; i++)
+ {
+ last = RecyclableReadOnlySequenceSegment.Create(ReadSegment(ref reader), last);
+ }
+                result.SetBody(new ReadOnlySequence<byte>(first, 0, last, last.Length), recycleBuffers: false); // we're reusing the live payload buffers
+ break;
+ }
- for (var j = 0; j < valuesCount; j++)
+            static ReadOnlyMemory<byte> ReadSegment(ref FormatterBinaryReader reader)
{
- values[j] = reader.ReadString();
+ var segmentLength = reader.Read7BitEncodedInt();
+ return reader.ReadBytesMemory(segmentLength);
}
- result.Headers[key] = values;
+ // we can just stop reading, but: here's how we'd skip tags if we had to
+ // (actually validate them in debug to prove reader)
+#if DEBUG
+ if (revision == SerializationRevision.V1_Original)
+ {
+ // Tags:
+ // Tags count: 7-bit encoded int
+
+ var tagsCount = reader.Read7BitEncodedInt();
+ if (tagsCount > 0)
+ {
+ // For each tag:
+ // data byte length: 7-bit encoded int
+ // UTF-8 encoded byte[]
+ for (var i = 0; i < tagsCount; i++)
+ {
+ reader.SkipString();
+ }
+ }
+ }
+#endif
}
-
- // Body:
- // Segments count: 7-bit encoded int
-
- var segmentsCount = reader.Read7BitEncodedInt();
-
- // For each segment:
- // data byte length: 7-bit encoded int
- // data byte[]
-
- var segments = new List(segmentsCount);
-
- for (var i = 0; i < segmentsCount; i++)
+ else
{
- var segmentLength = reader.Read7BitEncodedInt();
- var segment = reader.ReadBytes(segmentLength);
+ // Body:
+ // Bytes count: 7-bit encoded int
- segments.Add(segment);
+ var payloadLength = checked((int)reader.Read7BitEncodedInt64());
+ if (payloadLength != 0)
+ { // since the reader only supports linear memory currently, read the entire chunk as a single piece
+ result.SetBody(new(reader.ReadBytesMemory(payloadLength)), recycleBuffers: false); // we're reusing the live payload buffers
+ }
}
- result.Body = segments;
-
- // Tags:
- // Tags count: 7-bit encoded int
-
- var tagsCount = reader.Read7BitEncodedInt();
-
- // For each tag:
- // data byte length: 7-bit encoded int
- // UTF-8 encoded byte[]
-
- var tags = new string[tagsCount];
+ Debug.Assert(reader.IsEOF, "should have read entire payload");
+ return result;
+ }
- for (var i = 0; i < tagsCount; i++)
+ private static string ReadCommonHeader(ref FormatterBinaryReader reader)
+ {
+ int preamble = reader.Read7BitEncodedInt();
+ // LSB means "using common header/value"
+ if ((preamble & 1) == 1)
+ {
+ // non-LSB is the index of the common header
+ return CommonHeaders[preamble >> 1];
+ }
+ else
{
- var tagLength = reader.Read7BitEncodedInt();
- var tagData = reader.ReadBytes(tagLength);
- var tag = Encoding.UTF8.GetString(tagData);
+ // non-LSB is the string length
+ return reader.ReadString(preamble >> 1);
+ }
+ }
- tags[i] = tag;
+ static readonly string[] CommonHeaders = new string[]
+ {
+ // DO NOT remove values, and do not re-order/insert - append only
+ // NOTE: arbitrary common strings are fine - it doesn't all have to be headers
+ HeaderNames.Accept,
+ HeaderNames.AcceptCharset,
+ HeaderNames.AcceptEncoding,
+ HeaderNames.AcceptLanguage,
+ HeaderNames.AcceptRanges,
+ HeaderNames.AccessControlAllowCredentials,
+ HeaderNames.AccessControlAllowHeaders,
+ HeaderNames.AccessControlAllowMethods,
+ HeaderNames.AccessControlAllowOrigin,
+ HeaderNames.AccessControlExposeHeaders,
+ HeaderNames.AccessControlMaxAge,
+ HeaderNames.AccessControlRequestHeaders,
+ HeaderNames.AccessControlRequestMethod,
+ HeaderNames.Age,
+ HeaderNames.Allow,
+ HeaderNames.AltSvc,
+ HeaderNames.Authorization,
+ HeaderNames.Baggage,
+ HeaderNames.CacheControl,
+ HeaderNames.Connection,
+ HeaderNames.ContentDisposition,
+ HeaderNames.ContentEncoding,
+ HeaderNames.ContentLanguage,
+ HeaderNames.ContentLength,
+ HeaderNames.ContentLocation,
+ HeaderNames.ContentMD5,
+ HeaderNames.ContentRange,
+ HeaderNames.ContentSecurityPolicy,
+ HeaderNames.ContentSecurityPolicyReportOnly,
+ HeaderNames.ContentType,
+ HeaderNames.CorrelationContext,
+ HeaderNames.Cookie,
+ HeaderNames.Date,
+ HeaderNames.DNT,
+ HeaderNames.ETag,
+ HeaderNames.Expires,
+ HeaderNames.Expect,
+ HeaderNames.From,
+ HeaderNames.Host,
+ HeaderNames.KeepAlive,
+ HeaderNames.IfMatch,
+ HeaderNames.IfModifiedSince,
+ HeaderNames.IfNoneMatch,
+ HeaderNames.IfRange,
+ HeaderNames.IfUnmodifiedSince,
+ HeaderNames.LastModified,
+ HeaderNames.Link,
+ HeaderNames.Location,
+ HeaderNames.MaxForwards,
+ HeaderNames.Origin,
+ HeaderNames.Pragma,
+ HeaderNames.ProxyAuthenticate,
+ HeaderNames.ProxyAuthorization,
+ HeaderNames.ProxyConnection,
+ HeaderNames.Range,
+ HeaderNames.Referer,
+ HeaderNames.RequestId,
+ HeaderNames.RetryAfter,
+ HeaderNames.Server,
+ HeaderNames.StrictTransportSecurity,
+ HeaderNames.TE,
+ HeaderNames.Trailer,
+ HeaderNames.TransferEncoding,
+ HeaderNames.Translate,
+ HeaderNames.TraceParent,
+ HeaderNames.TraceState,
+ HeaderNames.Vary,
+ HeaderNames.Via,
+ HeaderNames.Warning,
+ HeaderNames.XContentTypeOptions,
+ HeaderNames.XFrameOptions,
+ HeaderNames.XPoweredBy,
+ HeaderNames.XRequestedWith,
+ HeaderNames.XUACompatible,
+ HeaderNames.XXSSProtection,
+ // additional MSFT headers
+ "X-Rtag",
+ "X-Vhost",
+
+ // for Content-Type
+ "text/html",
+ "text/html; charset=utf-8",
+ "text/html;charset=utf-8",
+ "text/xml",
+ "text/json",
+ "application/x-binary",
+ "image/svg+xml",
+ "image/x-png",
+ // for Accept-Encoding
+ "gzip",
+ "compress",
+ "deflate",
+ "br",
+ "identity",
+ "*",
+ // for X-Frame-Options
+ "SAMEORIGIN",
+ "DENY",
+ // for X-Content-Type
+ "nosniff"
+
+ // if you add new options here, you should rev the api version
+ };
+
+    private static readonly FrozenSet<string> IgnoredHeaders = FrozenSet.ToFrozenSet(new[] {
+ HeaderNames.RequestId, HeaderNames.ContentLength, HeaderNames.Age
+ }, StringComparer.OrdinalIgnoreCase);
+
+    private static readonly FrozenDictionary<string, int> CommonHeadersLookup = BuildCommonHeadersLookup();
+
+    static FrozenDictionary<string, int> BuildCommonHeadersLookup()
+    {
+        var arr = CommonHeaders;
+        var pairs = new List<KeyValuePair<string, int>>(arr.Length);
+ for (var i = 0; i < arr.Length; i++)
+ {
+ var header = arr[i];
+ if (!string.IsNullOrWhiteSpace(header)) // omit null/empty values
+ {
+ pairs.Add(new(header, i));
+ }
}
- result.Tags = tags;
- return result;
+ return FrozenDictionary.ToFrozenDictionary(pairs, StringComparer.OrdinalIgnoreCase);
}
+
+ internal static bool ShouldStoreHeader(string key) => !IgnoredHeaders.Contains(key);
}
diff --git a/src/Middleware/OutputCaching/src/OutputCacheMiddleware.cs b/src/Middleware/OutputCaching/src/OutputCacheMiddleware.cs
index 7748d0b5c300..5ea07ac86f9e 100644
--- a/src/Middleware/OutputCaching/src/OutputCacheMiddleware.cs
+++ b/src/Middleware/OutputCaching/src/OutputCacheMiddleware.cs
@@ -1,7 +1,6 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using System.Linq;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.ObjectPool;
@@ -96,7 +95,7 @@ private async Task InvokeAwaited(HttpContext httpContext, IReadOnlyList ExecuteResponseAsync()
{
@@ -176,23 +185,28 @@ private async Task InvokeAwaited(HttpContext httpContext, IReadOnlyList TryServeCachedResponseAsync(OutputCacheContext context
context.IsCacheEntryFresh = false;
}
+ var cachedResponse = context.CachedResponse;
if (context.IsCacheEntryFresh)
{
- var cachedResponseHeaders = context.CachedResponse.Headers;
-
// Check conditional request rules
if (ContentIsNotModified(context))
{
_logger.NotModifiedServed();
context.HttpContext.Response.StatusCode = StatusCodes.Status304NotModified;
- if (cachedResponseHeaders != null)
+ foreach (var key in HeadersToIncludeIn304)
{
- foreach (var key in HeadersToIncludeIn304)
+ if (cachedResponse.TryFindHeader(key, out var values))
{
- if (cachedResponseHeaders.TryGetValue(key, out var values))
- {
- context.HttpContext.Response.Headers[key] = values;
- }
+ context.HttpContext.Response.Headers[key] = values;
}
}
}
@@ -286,15 +296,9 @@ internal async Task TryServeCachedResponseAsync(OutputCacheContext context
{
var response = context.HttpContext.Response;
// Copy the cached status code and response headers
- response.StatusCode = context.CachedResponse.StatusCode;
+ response.StatusCode = cachedResponse.StatusCode;
- if (context.CachedResponse.Headers != null)
- {
- foreach (var header in context.CachedResponse.Headers)
- {
- response.Headers[header.Key] = header.Value;
- }
- }
+ cachedResponse.CopyHeadersTo(response.Headers);
// Note: int64 division truncates result and errors may be up to 1 second. This reduction in
// accuracy of age calculation is considered appropriate since it is small compared to clock
@@ -304,11 +308,11 @@ internal async Task TryServeCachedResponseAsync(OutputCacheContext context
// Copy the cached response body
var body = context.CachedResponse.Body;
- if (body != null && body.Length > 0)
+ if (!body.IsEmpty)
{
try
{
- await body.CopyToAsync(response.BodyWriter, context.HttpContext.RequestAborted);
+ await context.CachedResponse.CopyToAsync(response.BodyWriter, context.HttpContext.RequestAborted);
}
catch (OperationCanceledException)
{
@@ -319,7 +323,6 @@ internal async Task TryServeCachedResponseAsync(OutputCacheContext context
}
return true;
}
-
return false;
}
@@ -396,22 +399,9 @@ internal void FinalizeCacheHeaders(OutputCacheContext context)
headers.Date = HeaderUtilities.FormatDate(context.ResponseTime!.Value);
// Store the response on the state
- context.CachedResponse = new OutputCacheEntry
- {
- Created = context.ResponseTime!.Value,
- StatusCode = response.StatusCode,
- Tags = context.Tags.ToArray()
- };
-
- foreach (var header in headers)
- {
- context.CachedResponse.Headers ??= new();
-
- if (!string.Equals(header.Key, HeaderNames.Age, StringComparison.OrdinalIgnoreCase))
- {
- context.CachedResponse.Headers[header.Key] = header.Value;
- }
- }
+ var cacheEntry = new OutputCacheEntry(context.ResponseTime!.Value, response.StatusCode)
+ .CopyHeadersFrom(headers);
+ context.CachedResponse = cacheEntry;
return;
}
@@ -424,7 +414,8 @@ internal void FinalizeCacheHeaders(OutputCacheContext context)
///
internal async ValueTask FinalizeCacheBodyAsync(OutputCacheContext context)
{
- if (context.AllowCacheStorage && context.OutputCacheStream.BufferingEnabled)
+ if (context.AllowCacheStorage && context.OutputCacheStream.BufferingEnabled
+ && context.CachedResponse is not null)
{
// If AllowCacheLookup is false, the cache key was not created
CreateCacheKey(context);
@@ -436,15 +427,8 @@ internal async ValueTask FinalizeCacheBodyAsync(OutputCacheContext context)
|| (cachedResponseBody.Length == 0
&& HttpMethods.IsHead(context.HttpContext.Request.Method)))
{
- var response = context.HttpContext.Response;
- // Add a content-length if required
- if (!response.ContentLength.HasValue && StringValues.IsNullOrEmpty(response.Headers.TransferEncoding))
- {
- context.CachedResponse.Headers ??= new();
- context.CachedResponse.Headers.ContentLength = cachedResponseBody.Length;
- }
-
- context.CachedResponse.Body = cachedResponseBody;
+ // transfer lifetime from the buffer to the cached response
+ context.CachedResponse.SetBody(cachedResponseBody, recycleBuffers: true);
if (string.IsNullOrEmpty(context.CacheKey))
{
@@ -453,7 +437,9 @@ internal async ValueTask FinalizeCacheBodyAsync(OutputCacheContext context)
else
{
_logger.ResponseCached();
- await OutputCacheEntryFormatter.StoreAsync(context.CacheKey, context.CachedResponse, context.CachedResponseValidFor, _store, _logger, context.HttpContext.RequestAborted);
+
+ await OutputCacheEntryFormatter.StoreAsync(context.CacheKey, context.CachedResponse, context.Tags, context.CachedResponseValidFor,
+ _store, _logger, context.HttpContext.RequestAborted);
}
}
else
@@ -528,9 +514,14 @@ internal static void UnshimResponseStream(OutputCacheContext context)
internal bool ContentIsNotModified(OutputCacheContext context)
{
- var cachedResponseHeaders = context.CachedResponse.Headers;
+ var cachedResponse = context.CachedResponse;
var ifNoneMatchHeader = context.HttpContext.Request.Headers.IfNoneMatch;
+ if (cachedResponse is null)
+ {
+ return false;
+ }
+
if (!StringValues.IsNullOrEmpty(ifNoneMatchHeader))
{
if (ifNoneMatchHeader.Count == 1 && StringSegment.Equals(ifNoneMatchHeader[0], EntityTagHeaderValue.Any.Tag, StringComparison.OrdinalIgnoreCase))
@@ -538,9 +529,10 @@ internal bool ContentIsNotModified(OutputCacheContext context)
_logger.NotModifiedIfNoneMatchStar();
return true;
}
-
- if (cachedResponseHeaders != null && !StringValues.IsNullOrEmpty(cachedResponseHeaders[HeaderNames.ETag])
- && EntityTagHeaderValue.TryParse(cachedResponseHeaders[HeaderNames.ETag].ToString(), out var eTag)
+
+ if (cachedResponse.TryFindHeader(HeaderNames.ETag, out var raw)
+ && !StringValues.IsNullOrEmpty(raw)
+ && EntityTagHeaderValue.TryParse(raw.ToString(), out var eTag)
&& EntityTagHeaderValue.TryParseList(ifNoneMatchHeader, out var ifNoneMatchETags))
{
for (var i = 0; i < ifNoneMatchETags?.Count; i++)
@@ -559,13 +551,8 @@ internal bool ContentIsNotModified(OutputCacheContext context)
var ifModifiedSince = context.HttpContext.Request.Headers.IfModifiedSince;
if (!StringValues.IsNullOrEmpty(ifModifiedSince))
{
- if (cachedResponseHeaders == null)
- {
- return false;
- }
-
- if (!HeaderUtilities.TryParseDate(cachedResponseHeaders[HeaderNames.LastModified].ToString(), out var modified) &&
- !HeaderUtilities.TryParseDate(cachedResponseHeaders[HeaderNames.Date].ToString(), out modified))
+ if (!HeaderUtilities.TryParseDate(cachedResponse.FindHeader(HeaderNames.LastModified).ToString(), out var modified) &&
+ !HeaderUtilities.TryParseDate(cachedResponse.FindHeader(HeaderNames.Date).ToString(), out modified))
{
return false;
}
diff --git a/src/Middleware/OutputCaching/src/RecyclableArrayBufferWriter.cs b/src/Middleware/OutputCaching/src/RecyclableArrayBufferWriter.cs
new file mode 100644
index 000000000000..b42ccbbebfd4
--- /dev/null
+++ b/src/Middleware/OutputCaching/src/RecyclableArrayBufferWriter.cs
@@ -0,0 +1,129 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Diagnostics;
+
+namespace Microsoft.AspNetCore.OutputCaching;
+
+// this is effectively a cut-down re-implementation of ArrayBufferWriter
+// from https://github.com/dotnet/runtime/blob/6cd9bf1937c3b4d2f7304a6c534aacde58a202b6/src/libraries/Common/src/System/Buffers/ArrayBufferWriter.cs
+// except it uses the array pool for allocations
+internal sealed class RecyclableArrayBufferWriter<T> : IBufferWriter<T>, IDisposable
+{
+
+ // Copy of Array.MaxLength.
+ // Used by projects targeting .NET Framework.
+ private const int ArrayMaxLength = 0x7FFFFFC7;
+
+ private const int DefaultInitialBufferSize = 256;
+
+ private T[] _buffer;
+ private int _index;
+
+ public int FreeCapacity => _buffer.Length - _index;
+
+ public RecyclableArrayBufferWriter()
+ {
+ _buffer = Array.Empty<T>();
+ _index = 0;
+ }
+
+ public void Dispose()
+ {
+ var tmp = _buffer;
+ _index = 0;
+ _buffer = Array.Empty<T>();
+ if (tmp.Length != 0)
+ {
+ ArrayPool<T>.Shared.Return(tmp);
+ }
+ }
+
+ public void Advance(int count)
+ {
+ if (count < 0)
+ {
+ throw new ArgumentException(null, nameof(count));
+ }
+
+ if (_index > _buffer.Length - count)
+ {
+ Throw();
+ }
+
+ _index += count;
+
+ static void Throw()
+ => throw new ArgumentOutOfRangeException(nameof(count));
+ }
+
+ public Memory<T> GetMemory(int sizeHint = 0)
+ {
+ CheckAndResizeBuffer(sizeHint);
+ Debug.Assert(_buffer.Length > _index);
+ return _buffer.AsMemory(_index);
+ }
+
+ public Span<T> GetSpan(int sizeHint = 0)
+ {
+ CheckAndResizeBuffer(sizeHint);
+ Debug.Assert(_buffer.Length > _index);
+ return _buffer.AsSpan(_index);
+ }
+
+ // create a standalone isolated copy of the buffer
+ public T[] ToArray() => _buffer.AsSpan(0, _index).ToArray();
+
+ private void CheckAndResizeBuffer(int sizeHint)
+ {
+ ArgumentOutOfRangeException.ThrowIfNegative(sizeHint);
+
+ if (sizeHint == 0)
+ {
+ sizeHint = 1;
+ }
+
+ if (sizeHint > FreeCapacity)
+ {
+ int currentLength = _buffer.Length;
+
+ // Attempt to grow by the larger of the sizeHint and double the current size.
+ int growBy = Math.Max(sizeHint, currentLength);
+
+ if (currentLength == 0)
+ {
+ growBy = Math.Max(growBy, DefaultInitialBufferSize);
+ }
+
+ int newSize = currentLength + growBy;
+
+ if ((uint)newSize > int.MaxValue)
+ {
+ // Attempt to grow to ArrayMaxLength.
+ uint needed = (uint)(currentLength - FreeCapacity + sizeHint);
+ Debug.Assert(needed > currentLength);
+
+ if (needed > ArrayMaxLength)
+ {
+ ThrowOutOfMemoryException();
+ }
+
+ newSize = ArrayMaxLength;
+ }
+
+ // resize the backing buffer
+ var oldArray = _buffer;
+ _buffer = ArrayPool<T>.Shared.Rent(newSize);
+ oldArray.AsSpan(0, _index).CopyTo(_buffer);
+ if (oldArray.Length != 0)
+ {
+ ArrayPool<T>.Shared.Return(oldArray);
+ }
+ }
+
+ Debug.Assert(FreeCapacity > 0 && FreeCapacity >= sizeHint);
+
+ static void ThrowOutOfMemoryException() => throw new InvalidOperationException("Unable to grow buffer as requested");
+ }
+}
diff --git a/src/Middleware/OutputCaching/src/RecyclableReadOnlySequenceSegment.cs b/src/Middleware/OutputCaching/src/RecyclableReadOnlySequenceSegment.cs
new file mode 100644
index 000000000000..17511cb52073
--- /dev/null
+++ b/src/Middleware/OutputCaching/src/RecyclableReadOnlySequenceSegment.cs
@@ -0,0 +1,125 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Collections.Concurrent;
+using System.IO.Pipelines;
+using System.Runtime.InteropServices;
+
+namespace Microsoft.AspNetCore.OutputCaching;
+
+internal sealed class RecyclableReadOnlySequenceSegment : ReadOnlySequenceSegment<byte>
+{
+ public int Length => Memory.Length;
+ private RecyclableReadOnlySequenceSegment() { }
+
+ public static RecyclableReadOnlySequenceSegment Create(int minimumLength, RecyclableReadOnlySequenceSegment? previous)
+ => Create(Rent(minimumLength), previous);
+
+ public static RecyclableReadOnlySequenceSegment Create(ReadOnlyMemory<byte> memory, RecyclableReadOnlySequenceSegment? previous)
+ {
+ var obj = s_Spares.TryDequeue(out var value) ? value : new();
+ obj.Memory = memory;
+ if (previous is not null)
+ {
+ obj.RunningIndex = previous.RunningIndex + previous.Length;
+ previous.Next = obj;
+ }
+ return obj;
+ }
+
+ private const int TARGET_MAX = 128;
+ static readonly ConcurrentQueue<RecyclableReadOnlySequenceSegment> s_Spares = new();
+
+ public static void RecycleChain(RecyclableReadOnlySequenceSegment? obj, bool recycleBuffers = false)
+ {
+ while (obj is not null)
+ {
+ var mem = obj.Memory;
+ obj.Memory = default;
+ obj.RunningIndex = 0;
+ var next = obj.Next as RecyclableReadOnlySequenceSegment;
+ obj.Next = default;
+ if (s_Spares.Count < TARGET_MAX) // not precise, due to not wanting lock
+ { // (note: we still want to break the chain, even if not reusing; no else-break)
+ s_Spares.Enqueue(obj);
+ }
+ if (recycleBuffers)
+ {
+ Recycle(mem);
+ }
+ obj = next;
+ }
+ }
+ public static void RecycleChain(in ReadOnlySequence<byte> value, bool recycleBuffers = false)
+ {
+ var obj = value.Start.GetObject() as RecyclableReadOnlySequenceSegment;
+ if (obj is null)
+ {
+ // not segment based, but memory may still need recycling
+ if (recycleBuffers)
+ {
+ Recycle(value.First);
+ }
+ }
+ else
+ {
+ RecycleChain(obj, recycleBuffers);
+ }
+ }
+
+ internal static ReadOnlySequence<byte> CreateSequence(IList<byte[]> segments)
+ {
+ if (segments is null)
+ {
+ return default;
+ }
+ int count = segments.Count;
+ switch (count)
+ {
+ case 0:
+ return default;
+ case 1:
+ return new(segments[0]);
+ default:
+ RecyclableReadOnlySequenceSegment first = Create(segments[0], null), last = first;
+ for (int i = 1; i < count; i++)
+ {
+ last = Create(segments[i], last);
+ }
+ return new(first, 0, last, last.Length);
+ }
+ }
+
+ public static async ValueTask CopyToAsync(ReadOnlySequence<byte> source, PipeWriter destination, CancellationToken cancellationToken)
+ {
+ if (!source.IsEmpty)
+ {
+ if (source.IsSingleSegment)
+ {
+ await destination.WriteAsync(source.First, cancellationToken);
+ }
+ else
+ {
+ foreach (var segment in source)
+ {
+ if (!segment.IsEmpty)
+ {
+ await destination.WriteAsync(segment, cancellationToken);
+ }
+ }
+ }
+ }
+ }
+
+ private static byte[] Rent(int minimumLength)
+ => ArrayPool<byte>.Shared.Rent(minimumLength);
+
+ private static void Recycle(ReadOnlyMemory<byte> value)
+ {
+ if (MemoryMarshal.TryGetArray(value, out var segment) && segment.Offset == 0 && segment.Count != 0)
+ {
+ ArrayPool<byte>.Shared.Return(segment.Array!);
+ }
+ }
+}
diff --git a/src/Middleware/OutputCaching/src/RecyclableSequenceBuilder.cs b/src/Middleware/OutputCaching/src/RecyclableSequenceBuilder.cs
new file mode 100644
index 000000000000..5dc776055e1f
--- /dev/null
+++ b/src/Middleware/OutputCaching/src/RecyclableSequenceBuilder.cs
@@ -0,0 +1,124 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Diagnostics;
+using System.Runtime.InteropServices;
+
+namespace Microsoft.AspNetCore.OutputCaching;
+
+// allows capture of written payloads into a ReadOnlySequence based on RecyclableReadOnlySequenceSegment
+internal sealed class RecyclableSequenceBuilder : IDisposable
+{
+ private RecyclableReadOnlySequenceSegment? _firstSegment, _currentSegment;
+ private int _currentSegmentIndex;
+ private readonly int _segmentSize;
+ private bool _closed;
+
+ public long Length { get; private set; }
+
+ internal RecyclableSequenceBuilder(int segmentSize)
+ {
+ ArgumentOutOfRangeException.ThrowIfNegativeOrZero(segmentSize);
+
+ _segmentSize = segmentSize;
+ }
+
+ // Extracting the buffered segments closes the stream for writing
+ internal ReadOnlySequence<byte> DetachAndReset()
+ {
+ _closed = true;
+ if (_firstSegment is null)
+ {
+ return default;
+ }
+ if (ReferenceEquals(_firstSegment, _currentSegment))
+ {
+ // single segment; use a simple sequence (no segments)
+ ReadOnlyMemory<byte> memory = _firstSegment.Memory.Slice(0, _currentSegmentIndex);
+ // we *can* recycle our single segment, just: keep the buffers
+ RecyclableReadOnlySequenceSegment.RecycleChain(_firstSegment, recycleBuffers: false);
+ // reset local state
+ _firstSegment = _currentSegment = null;
+ _currentSegmentIndex = 0;
+ return new(memory);
+ }
+
+ // use a segmented sequence
+ var payload = new ReadOnlySequence<byte>(_firstSegment, 0, _currentSegment!, _currentSegmentIndex);
+
+ // reset our local state for an abundance of caution
+ _firstSegment = _currentSegment = null;
+ _currentSegmentIndex = 0;
+
+ return payload;
+ }
+
+ public void Dispose() => RecyclableReadOnlySequenceSegment.RecycleChain(DetachAndReset(), recycleBuffers: true);
+
+ private Span<byte> GetBuffer()
+ {
+ if (_closed)
+ {
+ Throw();
+ }
+ static void Throw() => throw new ObjectDisposedException(nameof(RecyclableSequenceBuilder), "The stream has been closed for writing.");
+
+ if (_firstSegment is null)
+ {
+ _currentSegment = _firstSegment = RecyclableReadOnlySequenceSegment.Create(_segmentSize, null);
+ _currentSegmentIndex = 0;
+ }
+
+ Debug.Assert(_currentSegment is not null);
+ var current = _currentSegment.Memory;
+ Debug.Assert(_currentSegmentIndex >= 0 && _currentSegmentIndex <= current.Length);
+
+ if (_currentSegmentIndex == current.Length)
+ {
+ _currentSegment = RecyclableReadOnlySequenceSegment.Create(_segmentSize, _currentSegment);
+ _currentSegmentIndex = 0;
+ current = _currentSegment.Memory;
+ }
+
+ // have capacity in current chunk
+ return MemoryMarshal.AsMemory(current).Span.Slice(_currentSegmentIndex);
+ }
+ public void Write(ReadOnlySpan<byte> buffer)
+ {
+ while (!buffer.IsEmpty)
+ {
+ var available = GetBuffer();
+ if (available.Length >= buffer.Length)
+ {
+ buffer.CopyTo(available);
+ Advance(buffer.Length);
+ return; // all done
+ }
+ else
+ {
+ var toWrite = Math.Min(buffer.Length, available.Length);
+ if (toWrite <= 0)
+ {
+ Throw();
+ }
+ buffer.Slice(0, toWrite).CopyTo(available);
+ Advance(toWrite);
+ buffer = buffer.Slice(toWrite);
+ }
+ }
+ static void Throw() => throw new InvalidOperationException("Unable to acquire non-empty write buffer");
+ }
+
+ private void Advance(int count)
+ {
+ _currentSegmentIndex += count;
+ Length += count;
+ }
+
+ public void WriteByte(byte value)
+ {
+ GetBuffer()[0] = value;
+ Advance(1);
+ }
+}
diff --git a/src/Middleware/OutputCaching/src/Serialization/FormatterEntry.cs b/src/Middleware/OutputCaching/src/Serialization/FormatterEntry.cs
deleted file mode 100644
index f3aadfbadd6b..000000000000
--- a/src/Middleware/OutputCaching/src/Serialization/FormatterEntry.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.AspNetCore.OutputCaching.Serialization;
-internal sealed class FormatterEntry
-{
- public DateTimeOffset Created { get; set; }
- public int StatusCode { get; set; }
- public Dictionary Headers { get; set; } = default!;
- public List Body { get; set; } = default!;
- public string[] Tags { get; set; } = default!;
-}
diff --git a/src/Middleware/OutputCaching/src/Streams/OutputCacheStream.cs b/src/Middleware/OutputCaching/src/Streams/OutputCacheStream.cs
index e8a51384edd4..5cce42ee408b 100644
--- a/src/Middleware/OutputCaching/src/Streams/OutputCacheStream.cs
+++ b/src/Middleware/OutputCaching/src/Streams/OutputCacheStream.cs
@@ -1,7 +1,7 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using Microsoft.AspNetCore.WriteStream;
+using System.Buffers;
namespace Microsoft.AspNetCore.OutputCaching;
@@ -10,7 +10,7 @@ internal sealed class OutputCacheStream : Stream
private readonly Stream _innerStream;
private readonly long _maxBufferSize;
private readonly int _segmentSize;
- private readonly SegmentWriteStream _segmentWriteStream;
+ private readonly RecyclableSequenceBuilder _segmentWriteStream;
private readonly Action _startResponseCallback;
internal OutputCacheStream(Stream innerStream, long maxBufferSize, int segmentSize, Action startResponseCallback)
@@ -19,7 +19,7 @@ internal OutputCacheStream(Stream innerStream, long maxBufferSize, int segmentSi
_maxBufferSize = maxBufferSize;
_segmentSize = segmentSize;
_startResponseCallback = startResponseCallback;
- _segmentWriteStream = new SegmentWriteStream(_segmentSize);
+ _segmentWriteStream = new(_segmentSize);
}
internal bool BufferingEnabled { get; private set; } = true;
@@ -42,13 +42,13 @@ public override long Position
}
}
- internal CachedResponseBody GetCachedResponseBody()
+ internal ReadOnlySequence<byte> GetCachedResponseBody()
{
if (!BufferingEnabled)
{
throw new InvalidOperationException("Buffer stream cannot be retrieved since buffering is disabled.");
}
- return new CachedResponseBody(_segmentWriteStream.GetSegments(), _segmentWriteStream.Length);
+ return _segmentWriteStream.DetachAndReset();
}
internal void DisableBuffering()
@@ -122,7 +122,33 @@ public override void Write(byte[] buffer, int offset, int count)
}
else
{
- _segmentWriteStream.Write(buffer, offset, count);
+ _segmentWriteStream.Write(buffer.AsSpan(offset, count));
+ }
+ }
+ }
+
+ public override void Write(ReadOnlySpan<byte> buffer)
+ {
+ try
+ {
+ _startResponseCallback();
+ _innerStream.Write(buffer);
+ }
+ catch
+ {
+ DisableBuffering();
+ throw;
+ }
+
+ if (BufferingEnabled)
+ {
+ if (_segmentWriteStream.Length + buffer.Length > _maxBufferSize)
+ {
+ DisableBuffering();
+ }
+ else
+ {
+ _segmentWriteStream.Write(buffer);
}
}
}
@@ -151,7 +177,7 @@ public override async ValueTask WriteAsync(ReadOnlyMemory buffer, Cancella
}
else
{
- await _segmentWriteStream.WriteAsync(buffer, cancellationToken);
+ _segmentWriteStream.Write(buffer.Span);
}
}
}
@@ -181,6 +207,15 @@ public override void WriteByte(byte value)
}
}
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing)
+ {
+ _segmentWriteStream?.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state)
=> TaskToApm.Begin(WriteAsync(buffer, offset, count, CancellationToken.None), callback, state);
diff --git a/src/Middleware/OutputCaching/test/CachedResponseBodyTests.cs b/src/Middleware/OutputCaching/test/CachedResponseBodyTests.cs
index 6867fc2fb7e9..89c371ea5f26 100644
--- a/src/Middleware/OutputCaching/test/CachedResponseBodyTests.cs
+++ b/src/Middleware/OutputCaching/test/CachedResponseBodyTests.cs
@@ -15,18 +15,10 @@ public class CachedResponseBodyTests
public void GetSegments()
{
var segments = new List();
- var body = new CachedResponseBody(segments, 0);
+ var body = RecyclableReadOnlySequenceSegment.CreateSequence(segments);
- Assert.Same(segments, body.Segments);
- }
-
- [Fact]
- public void GetLength()
- {
- var segments = new List();
- var body = new CachedResponseBody(segments, 42);
-
- Assert.Equal(42, body.Length);
+ Assert.True(body.IsEmpty);
+ RecyclableReadOnlySequenceSegment.RecycleChain(body);
}
[Fact]
@@ -34,17 +26,18 @@ public async Task Copy_DoNothingWhenNoSegments()
{
var segments = new List();
var receivedSegments = new List();
- var body = new CachedResponseBody(segments, 0);
+ var body = RecyclableReadOnlySequenceSegment.CreateSequence(segments);
var pipe = new Pipe();
using var cts = new CancellationTokenSource(_timeout);
var receiverTask = ReceiveDataAsync(pipe.Reader, receivedSegments, cts.Token);
- var copyTask = body.CopyToAsync(pipe.Writer, cts.Token).ContinueWith(_ => pipe.Writer.CompleteAsync());
+ var copyTask = RecyclableReadOnlySequenceSegment.CopyToAsync(body, pipe.Writer, cts.Token).AsTask().ContinueWith(t => pipe.Writer.CompleteAsync(t.Exception));
await Task.WhenAll(receiverTask, copyTask);
Assert.Empty(receivedSegments);
+ RecyclableReadOnlySequenceSegment.RecycleChain(body);
}
[Fact]
@@ -55,7 +48,7 @@ public async Task Copy_SingleSegment()
new byte[] { 1 }
};
var receivedSegments = new List();
- var body = new CachedResponseBody(segments, 0);
+ var body = RecyclableReadOnlySequenceSegment.CreateSequence(segments);
var pipe = new Pipe();
@@ -67,6 +60,7 @@ public async Task Copy_SingleSegment()
await Task.WhenAll(receiverTask, copyTask);
Assert.Equal(segments, receivedSegments);
+ RecyclableReadOnlySequenceSegment.RecycleChain(body);
}
[Fact]
@@ -78,7 +72,7 @@ public async Task Copy_MultipleSegments()
new byte[] { 2, 3 }
};
var receivedSegments = new List();
- var body = new CachedResponseBody(segments, 0);
+ var body = RecyclableReadOnlySequenceSegment.CreateSequence(segments);
var pipe = new Pipe();
@@ -90,11 +84,12 @@ public async Task Copy_MultipleSegments()
await Task.WhenAll(receiverTask, copyTask);
Assert.Equal(new byte[] { 1, 2, 3 }, receivedSegments.SelectMany(x => x).ToArray());
+ RecyclableReadOnlySequenceSegment.RecycleChain(body);
}
- static async Task CopyDataAsync(CachedResponseBody body, PipeWriter writer, CancellationToken cancellationToken)
+ static async Task CopyDataAsync(ReadOnlySequence body, PipeWriter writer, CancellationToken cancellationToken)
{
- await body.CopyToAsync(writer, cancellationToken);
+ await RecyclableReadOnlySequenceSegment.CopyToAsync(body, writer, cancellationToken);
await writer.CompleteAsync();
}
diff --git a/src/Middleware/OutputCaching/test/OutputCacheEntryFormatterTests.cs b/src/Middleware/OutputCaching/test/OutputCacheEntryFormatterTests.cs
index 2b11fdb871e5..4c62bb27e059 100644
--- a/src/Middleware/OutputCaching/test/OutputCacheEntryFormatterTests.cs
+++ b/src/Middleware/OutputCaching/test/OutputCacheEntryFormatterTests.cs
@@ -1,6 +1,7 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
+using System.Buffers;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Net.Http.Headers;
@@ -9,21 +10,17 @@ namespace Microsoft.AspNetCore.OutputCaching.Tests;
public class OutputCacheEntryFormatterTests
{
- private static CachedResponseBody EmptyResponseBody = new(new List(), 0);
+ // arbitrarily some time 17 May 2023 - so we can predict payloads
+ static readonly DateTimeOffset KnownTime = DateTimeOffset.FromUnixTimeMilliseconds(1684322693875);
[Fact]
public async Task StoreAndGet_StoresEmptyValues()
{
var store = new TestOutputCache();
var key = "abc";
- var entry = new OutputCacheEntry()
- {
- Body = new CachedResponseBody(new List(), 0),
- Headers = new HeaderDictionary(),
- Tags = Array.Empty()
- };
+ using var entry = new OutputCacheEntry(KnownTime, StatusCodes.Status200OK);
- await OutputCacheEntryFormatter.StoreAsync(key, entry, TimeSpan.Zero, store, NullLogger.Instance, default);
+ await OutputCacheEntryFormatter.StoreAsync(key, entry, null, TimeSpan.Zero, store, NullLogger.Instance, default);
var result = await OutputCacheEntryFormatter.GetAsync(key, store, default);
@@ -38,80 +35,158 @@ public async Task StoreAndGet_StoresAllValues()
var store = new TestOutputCache();
var key = "abc";
- var entry = new OutputCacheEntry()
+ using (var entry = new OutputCacheEntry(KnownTime, StatusCodes.Status201Created)
+ .CopyHeadersFrom(new HeaderDictionary { [HeaderNames.Accept] = new[] { "text/plain", "text/html" }, [HeaderNames.AcceptCharset] = "utf8" })
+ .CreateBodyFrom(new[] { bodySegment1, bodySegment2 }))
{
- Body = new CachedResponseBody(new List() { bodySegment1, bodySegment2 }, bodySegment1.Length + bodySegment2.Length),
- Created = DateTimeOffset.UtcNow,
- Headers = new HeaderDictionary { [HeaderNames.Accept] = new[] { "text/plain", "text/html" }, [HeaderNames.AcceptCharset] = "utf8" },
- StatusCode = StatusCodes.Status201Created,
- Tags = new[] { "tag", "タグ" }
- };
-
- await OutputCacheEntryFormatter.StoreAsync(key, entry, TimeSpan.Zero, store, NullLogger.Instance, default);
-
- var result = await OutputCacheEntryFormatter.GetAsync(key, store, default);
+ await OutputCacheEntryFormatter.StoreAsync(key, entry, new HashSet() { "tag", "タグ" }, TimeSpan.Zero, store, NullLogger.Instance, default);
+ var result = await OutputCacheEntryFormatter.GetAsync(key, store, default);
- AssertEntriesAreSame(entry, result);
+ AssertEntriesAreSame(entry, result);
+ }
}
[Fact]
- public async Task StoreAndGet_StoresNullTags()
+
+ public async Task StoreAndGet_StoresNullHeaders()
{
var store = new TestOutputCache();
var key = "abc";
- var entry = new OutputCacheEntry()
+
+ using (var entry = new OutputCacheEntry(KnownTime, StatusCodes.Status201Created))
{
- Body = EmptyResponseBody,
- Headers = new HeaderDictionary(),
- Tags = new[] { null, null, "", "tag" }
- };
+ entry.CopyHeadersFrom(new HeaderDictionary { [""] = "", [HeaderNames.Accept] = new[] { null, null, "", "text/html" }, [HeaderNames.AcceptCharset] = new string[] { null } });
- await OutputCacheEntryFormatter.StoreAsync(key, entry, TimeSpan.Zero, store, NullLogger.Instance, default);
+ await OutputCacheEntryFormatter.StoreAsync(key, entry, null, TimeSpan.Zero, store, NullLogger.Instance, default);
+ }
+ var payload = await store.GetAsync(key, CancellationToken.None);
+ Assert.NotNull(payload);
+ var hex = BitConverter.ToString(payload);
+ Assert.Equal(KnownV2Payload, hex);
var result = await OutputCacheEntryFormatter.GetAsync(key, store, default);
- Assert.Equal(4, result.Tags.Length);
- Assert.Equal("", result.Tags[0]);
- Assert.Equal("", result.Tags[1]);
- Assert.Equal("", result.Tags[2]);
- Assert.Equal("tag", result.Tags[3]);
+ Assert.Equal(3, result.Headers.Length);
+ Assert.True(result.TryFindHeader("", out var values), "Find ''");
+ Assert.Equal("", values);
+ Assert.True(result.TryFindHeader(HeaderNames.Accept, out values));
+ Assert.Equal(4, values.Count);
+ Assert.Equal("", values[0]);
+ Assert.Equal("", values[1]);
+ Assert.Equal("", values[2]);
+ Assert.Equal("text/html", values[3]);
+ Assert.True(result.TryFindHeader(HeaderNames.AcceptCharset, out values), "Find 'AcceptCharset'");
+ Assert.Equal("", values[0]);
}
[Fact]
- public async Task StoreAndGet_StoresNullHeaders()
+ public void KnownV1AndV2AreCompatible()
{
- var store = new TestOutputCache();
- var key = "abc";
- var entry = new OutputCacheEntry()
+ AssertEntriesAreSame(
+ OutputCacheEntryFormatter.Deserialize(FromHex(KnownV1Payload)),
+ OutputCacheEntryFormatter.Deserialize(FromHex(KnownV2Payload))
+ );
+ }
+ static byte[] FromHex(string hex)
+ {
+ // inefficient; for testing only
+ hex = hex.Replace("-", "");
+ var arr = new byte[hex.Length / 2];
+ int index = 0;
+ for (int i = 0; i < arr.Length; i++)
{
- Body = EmptyResponseBody,
- Headers = new HeaderDictionary { [""] = "", [HeaderNames.Accept] = new[] { null, null, "", "text/html" }, [HeaderNames.AcceptCharset] = new string[] { null } },
- Tags = Array.Empty()
- };
-
- await OutputCacheEntryFormatter.StoreAsync(key, entry, TimeSpan.Zero, store, NullLogger.Instance, default);
+ arr[i] = (byte)((Nibble(hex[index++]) << 4) | Nibble(hex[index++]));
+ }
+ return arr;
- var result = await OutputCacheEntryFormatter.GetAsync(key, store, default);
-
- Assert.Equal(3, result.Headers.Count);
- Assert.Equal("", result.Headers[""]);
- Assert.Equal(4, result.Headers[HeaderNames.Accept].Count);
- Assert.Equal("", result.Headers[HeaderNames.Accept][0]);
- Assert.Equal("", result.Headers[HeaderNames.Accept][1]);
- Assert.Equal("", result.Headers[HeaderNames.Accept][2]);
- Assert.Equal("text/html", result.Headers[HeaderNames.Accept][3]);
- Assert.Equal("", result.Headers[HeaderNames.AcceptCharset][0]);
+ static int Nibble(char value)
+ {
+ return value switch
+ {
+ >= '0' and <= '9' => value - '0',
+ >= 'a' and <= 'f' => value - 'a' + 10,
+ >= 'A' and <= 'F' => value - 'A' + 10,
+ _ => throw new ArgumentOutOfRangeException(nameof(value), "token is not hex: " + value.ToString())
+ };
+ }
}
+ const string KnownV1Payload = "01-B0-E8-8E-B2-95-D9-D5-ED-08-00-C9-01-03-00-01-00-06-41-63-63-65-70-74-04-00-00-00-09-74-65-78-74-2F-68-74-6D-6C-0E-41-63-63-65-70-74-2D-43-68-61-72-73-65-74-01-00-00-00";
+ // 01 version 1
+ // B0-E8-8E-B2-95-D9-D5-ED-08 ticks 1684322693875
+ // 00 offset 0
+ // C9-01 status 201
+ // 03 headers 3
+ // 00 [0] header name ""
+ // 01 [0] header value count 1
+ // 00 [0.0] header value ""
+ // 06-41-63-63-65-70-74 [1] header name "Accept"
+ // 04 [1] header value count 4
+ // 00-00-00 [1.0, 1.1, 1.2] header value ""
+ // 09-74-65-78-74-2F-68-74-6D-6C [1.3] header value "text/html"
+ // 0E-41-63-63-65-70-74-2D-43-68-61-72-73-65-74 [2] header name "Accept-Charset"
+ // 01 [2] header value count 1
+ // 00 [2.0] header value ""
+ // 00 segment count 0
+ // 00 tag count 0
+
+ const string KnownV2Payload = "02-B0-E8-8E-B2-95-D9-D5-ED-08-00-C9-01-03-00-01-00-01-04-00-00-00-9B-01-03-01-00-00";
+ // 02 version 2
+ // B0-E8-8E-B2-95-D9-D5-ED-08 ticks 1684322693875
+ // 00 offset 0
+ // C9-01 status 201
+ // 03 headers 3
+ // 00 [0] header name ""
+ // 01 [0] header value count 1
+ // 00 [0.0] header value ""
+ // 01 [1] header name "Accept"
+ // 04 [1] header value count 4
+ // 00-00-00 [1.0, 1.1, 1.2] header value ""
+ // 9B-01 [1.3] header value "text/html"
+ // 03 [2] header name "Accept-Charset"
+ // 01 [2] header value count 1
+ // 00 [2.0] header value ""
+ // 00 segment count 0
+
private static void AssertEntriesAreSame(OutputCacheEntry expected, OutputCacheEntry actual)
{
Assert.NotNull(expected);
Assert.NotNull(actual);
- Assert.Equal(expected.Tags, actual.Tags);
Assert.Equal(expected.Created, actual.Created);
Assert.Equal(expected.StatusCode, actual.StatusCode);
- Assert.Equal(expected.Headers, actual.Headers);
+ Assert.True(expected.Headers.Span.SequenceEqual(actual.Headers.Span), "Headers");
Assert.Equal(expected.Body.Length, actual.Body.Length);
- Assert.Equal(expected.Body.Segments, actual.Body.Segments);
+ Assert.True(SequenceEqual(expected.Body, actual.Body));
+ }
+
+ static bool SequenceEqual(ReadOnlySequence<byte> x, ReadOnlySequence<byte> y)
+ {
+ var xLinear = Linearize(x, out var xBuffer);
+ var yLinear = Linearize(y, out var yBuffer);
+
+ var result = xLinear.Span.SequenceEqual(yLinear.Span);
+
+ if (xBuffer is not null)
+ {
+ ArrayPool.Shared.Return(xBuffer);
+ }
+ if (yBuffer is not null)
+ {
+ ArrayPool.Shared.Return(yBuffer);
+ }
+
+ return result;
+
+ static ReadOnlyMemory<byte> Linearize(in ReadOnlySequence<byte> value, out byte[]? lease)
+ {
+ lease = null;
+ if (value.IsEmpty) { return default; }
+ if (value.IsSingleSegment) { return value.First; }
+
+ var len = checked((int)value.Length);
+ lease = ArrayPool<byte>.Shared.Rent(len);
+ value.CopyTo(lease);
+ return new ReadOnlyMemory<byte>(lease, 0, len);
+ }
}
}
diff --git a/src/Middleware/OutputCaching/test/OutputCacheMiddlewareTests.cs b/src/Middleware/OutputCaching/test/OutputCacheMiddlewareTests.cs
index e268ab91b1d3..a2dd6cb70499 100644
--- a/src/Middleware/OutputCaching/test/OutputCacheMiddlewareTests.cs
+++ b/src/Middleware/OutputCaching/test/OutputCacheMiddlewareTests.cs
@@ -61,17 +61,17 @@ public async Task TryServeFromCacheAsync_CachedResponseFound_Succeeds()
var context = TestUtils.CreateTestContext(cache: cache);
middleware.TryGetRequestPolicies(context.HttpContext, out var policies);
- await OutputCacheEntryFormatter.StoreAsync(
- "BaseKey",
- new OutputCacheEntry()
- {
- Headers = new HeaderDictionary(),
- Body = new CachedResponseBody(new List(0), 0)
- },
- TimeSpan.Zero,
- cache,
- NullLogger.Instance,
- default);
+ using (var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK))
+ {
+ await OutputCacheEntryFormatter.StoreAsync(
+ "BaseKey",
+ entry,
+ null,
+ TimeSpan.Zero,
+ cache,
+ NullLogger.Instance,
+ default);
+ }
Assert.True(await middleware.TryServeFromCacheAsync(context, policies));
Assert.Equal(1, cache.GetCount);
@@ -91,20 +91,17 @@ public async Task TryServeFromCacheAsync_CachedResponseFound_OverwritesExistingH
context.CacheKey = "BaseKey";
context.HttpContext.Response.Headers["MyHeader"] = "OldValue";
- await OutputCacheEntryFormatter.StoreAsync(context.CacheKey,
- new OutputCacheEntry()
- {
- Headers = new HeaderDictionary()
- {
- { "MyHeader", "NewValue" }
- },
- Body = new CachedResponseBody(new List(0), 0)
- },
- TimeSpan.Zero,
- cache,
- NullLogger.Instance,
- default);
-
+ using (var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK)
+ .CopyHeadersFrom(new HeaderDictionary() { { "MyHeader", "NewValue" } }))
+ {
+ await OutputCacheEntryFormatter.StoreAsync(context.CacheKey,
+ entry,
+ null,
+ TimeSpan.Zero,
+ cache,
+ NullLogger.Instance,
+ default);
+ }
Assert.True(await middleware.TryServeFromCacheAsync(context, policies));
Assert.Equal("NewValue", context.HttpContext.Response.Headers["MyHeader"]);
Assert.Equal(1, cache.GetCount);
@@ -123,16 +120,16 @@ public async Task TryServeFromCacheAsync_CachedResponseFound_Serves304IfPossible
context.HttpContext.Request.Headers.IfNoneMatch = "*";
middleware.TryGetRequestPolicies(context.HttpContext, out var policies);
- await OutputCacheEntryFormatter.StoreAsync("BaseKey",
- new OutputCacheEntry()
- {
- Body = new CachedResponseBody(new List(0), 0),
- Headers = new()
- },
- TimeSpan.Zero,
- cache,
- NullLogger.Instance,
- default);
+ using (var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK))
+ {
+ await OutputCacheEntryFormatter.StoreAsync("BaseKey",
+ entry,
+ null,
+ TimeSpan.Zero,
+ cache,
+ NullLogger.Instance,
+ default);
+ }
Assert.True(await middleware.TryServeFromCacheAsync(context, policies));
Assert.Equal(1, cache.GetCount);
@@ -149,7 +146,8 @@ public void ContentIsNotModified_NotConditionalRequest_False()
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink, cache: cache);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
Assert.False(middleware.ContentIsNotModified(context));
Assert.Empty(sink.Writes);
@@ -162,22 +160,28 @@ public void ContentIsNotModified_IfModifiedSince_FallsBackToDateHeader()
var sink = new TestSink();
var context = TestUtils.CreateTestContext(testSink: sink);
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.HttpContext.Request.Headers.IfModifiedSince = HeaderUtilities.FormatDate(utcNow);
+ static void SetDateHeader(OutputCacheEntry entry, DateTimeOffset value)
+ {
+ entry.CopyHeadersFrom(new HeaderDictionary { [HeaderNames.Date] = HeaderUtilities.FormatDate(value) });
+ }
+
// Verify modifications in the past succeeds
- context.CachedResponse.Headers[HeaderNames.Date] = HeaderUtilities.FormatDate(utcNow - TimeSpan.FromSeconds(10));
+ SetDateHeader(context.CachedResponse, utcNow - TimeSpan.FromSeconds(10));
Assert.True(middleware.ContentIsNotModified(context));
Assert.Single(sink.Writes);
// Verify modifications at present succeeds
- context.CachedResponse.Headers[HeaderNames.Date] = HeaderUtilities.FormatDate(utcNow);
+ SetDateHeader(context.CachedResponse, utcNow);
Assert.True(middleware.ContentIsNotModified(context));
Assert.Equal(2, sink.Writes.Count);
// Verify modifications in the future fails
- context.CachedResponse.Headers[HeaderNames.Date] = HeaderUtilities.FormatDate(utcNow + TimeSpan.FromSeconds(10));
+ SetDateHeader(context.CachedResponse, utcNow + TimeSpan.FromSeconds(10));
Assert.False(middleware.ContentIsNotModified(context));
// Verify logging
@@ -194,25 +198,32 @@ public void ContentIsNotModified_IfModifiedSince_LastModifiedOverridesDateHeader
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.HttpContext.Request.Headers.IfModifiedSince = HeaderUtilities.FormatDate(utcNow);
+ static void SetDateHeaders(OutputCacheEntry entry, DateTimeOffset date, DateTimeOffset lastModified)
+ {
+ entry.CopyHeadersFrom(new HeaderDictionary
+ {
+ [HeaderNames.Date] = HeaderUtilities.FormatDate(date),
+ [HeaderNames.LastModified] = HeaderUtilities.FormatDate(lastModified),
+ });
+ }
+
// Verify modifications in the past succeeds
- context.CachedResponse.Headers[HeaderNames.Date] = HeaderUtilities.FormatDate(utcNow + TimeSpan.FromSeconds(10));
- context.CachedResponse.Headers[HeaderNames.LastModified] = HeaderUtilities.FormatDate(utcNow - TimeSpan.FromSeconds(10));
+ SetDateHeaders(context.CachedResponse, utcNow + TimeSpan.FromSeconds(10), utcNow - TimeSpan.FromSeconds(10));
Assert.True(middleware.ContentIsNotModified(context));
Assert.Single(sink.Writes);
// Verify modifications at present
- context.CachedResponse.Headers[HeaderNames.Date] = HeaderUtilities.FormatDate(utcNow + TimeSpan.FromSeconds(10));
- context.CachedResponse.Headers[HeaderNames.LastModified] = HeaderUtilities.FormatDate(utcNow);
+ SetDateHeaders(context.CachedResponse, utcNow + TimeSpan.FromSeconds(10), utcNow);
Assert.True(middleware.ContentIsNotModified(context));
Assert.Equal(2, sink.Writes.Count);
// Verify modifications in the future fails
- context.CachedResponse.Headers[HeaderNames.Date] = HeaderUtilities.FormatDate(utcNow - TimeSpan.FromSeconds(10));
- context.CachedResponse.Headers[HeaderNames.LastModified] = HeaderUtilities.FormatDate(utcNow + TimeSpan.FromSeconds(10));
+ SetDateHeaders(context.CachedResponse, utcNow - TimeSpan.FromSeconds(10), utcNow + TimeSpan.FromSeconds(10));
Assert.False(middleware.ContentIsNotModified(context));
// Verify logging
@@ -229,11 +240,12 @@ public void ContentIsNotModified_IfNoneMatch_Overrides_IfModifiedSince_ToTrue()
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
// This would fail the IfModifiedSince checks
context.HttpContext.Request.Headers.IfModifiedSince = HeaderUtilities.FormatDate(utcNow);
- context.CachedResponse.Headers[HeaderNames.LastModified] = HeaderUtilities.FormatDate(utcNow + TimeSpan.FromSeconds(10));
+ entry.CopyHeadersFrom(new HeaderDictionary { [HeaderNames.LastModified] = HeaderUtilities.FormatDate(utcNow + TimeSpan.FromSeconds(10)) });
context.HttpContext.Request.Headers.IfNoneMatch = EntityTagHeaderValue.Any.ToString();
Assert.True(middleware.ContentIsNotModified(context));
@@ -249,11 +261,12 @@ public void ContentIsNotModified_IfNoneMatch_Overrides_IfModifiedSince_ToFalse()
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
// This would pass the IfModifiedSince checks
context.HttpContext.Request.Headers.IfModifiedSince = HeaderUtilities.FormatDate(utcNow);
- context.CachedResponse.Headers[HeaderNames.LastModified] = HeaderUtilities.FormatDate(utcNow - TimeSpan.FromSeconds(10));
+ context.CachedResponse.CopyHeadersFrom(new HeaderDictionary { [HeaderNames.LastModified] = HeaderUtilities.FormatDate(utcNow - TimeSpan.FromSeconds(10)) });
context.HttpContext.Request.Headers.IfNoneMatch = "\"E1\"";
Assert.False(middleware.ContentIsNotModified(context));
@@ -266,7 +279,8 @@ public void ContentIsNotModified_IfNoneMatch_AnyWithoutETagInResponse_False()
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.HttpContext.Request.Headers.IfNoneMatch = "\"E1\"";
Assert.False(middleware.ContentIsNotModified(context));
@@ -294,8 +308,9 @@ public void ContentIsNotModified_IfNoneMatch_ExplicitWithMatch_True(EntityTagHea
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
- context.CachedResponse.Headers[HeaderNames.ETag] = responseETag.ToString();
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK)
+ .CopyHeadersFrom(new HeaderDictionary { [HeaderNames.ETag] = responseETag.ToString() });
+ context.CachedResponse = entry;
context.HttpContext.Request.Headers.IfNoneMatch = requestETag.ToString();
Assert.True(middleware.ContentIsNotModified(context));
@@ -310,8 +325,9 @@ public void ContentIsNotModified_IfNoneMatch_ExplicitWithoutMatch_False()
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
- context.CachedResponse.Headers[HeaderNames.ETag] = "\"E2\"";
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
+ context.CachedResponse.CopyHeadersFrom(new HeaderDictionary { [HeaderNames.ETag] = "\"E2\"" });
context.HttpContext.Request.Headers.IfNoneMatch = "\"E1\"";
Assert.False(middleware.ContentIsNotModified(context));
@@ -324,8 +340,9 @@ public void ContentIsNotModified_IfNoneMatch_MatchesAtLeastOneValue_True()
var sink = new TestSink();
var middleware = TestUtils.CreateTestMiddleware(testSink: sink);
var context = TestUtils.CreateTestContext(testSink: sink);
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
- context.CachedResponse.Headers[HeaderNames.ETag] = "\"E2\"";
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
+ context.CachedResponse.CopyHeadersFrom(new HeaderDictionary { [HeaderNames.ETag] = "\"E2\"" });
context.HttpContext.Request.Headers.IfNoneMatch = new string[] { "\"E0\", \"E1\"", "\"E1\", \"E2\"" };
Assert.True(middleware.ContentIsNotModified(context));
@@ -566,7 +583,7 @@ public void FinalizeCacheHeadersAsync_StoresHeaders()
middleware.FinalizeCacheHeaders(context);
- Assert.Equal(new StringValues(new[] { "HeaderB, heaDera" }), context.CachedResponse.Headers[HeaderNames.Vary]);
+ Assert.Equal(new StringValues(new[] { "HeaderB, heaDera" }), context.CachedResponse.FindHeader(HeaderNames.Vary));
}
[Fact]
@@ -582,7 +599,8 @@ public async Task FinalizeCacheBody_Cache_IfContentLengthMatches()
await context.HttpContext.Response.WriteAsync(new string('0', 20));
- context.CachedResponse = new OutputCacheEntry { Headers = new() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.CacheKey = "BaseKey";
context.CachedResponseValidFor = TimeSpan.FromSeconds(10);
@@ -610,7 +628,8 @@ public async Task FinalizeCacheBody_DoNotCache_IfContentLengthMismatches(string
await context.HttpContext.Response.WriteAsync(new string('0', 10));
- context.CachedResponse = new OutputCacheEntry();
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.CacheKey = "BaseKey";
context.CachedResponseValidFor = TimeSpan.FromSeconds(10);
@@ -642,7 +661,8 @@ public async Task FinalizeCacheBody_RequestHead_Cache_IfContentLengthPresent_And
await context.HttpContext.Response.WriteAsync(new string('0', 10));
}
- context.CachedResponse = new OutputCacheEntry { Headers = new() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.CacheKey = "BaseKey";
context.CachedResponseValidFor = TimeSpan.FromSeconds(10);
@@ -666,7 +686,8 @@ public async Task FinalizeCacheBody_Cache_IfContentLengthAbsent()
await context.HttpContext.Response.WriteAsync(new string('0', 10));
- context.CachedResponse = new OutputCacheEntry { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.CacheKey = "BaseKey";
context.CachedResponseValidFor = TimeSpan.FromSeconds(10);
@@ -737,7 +758,8 @@ public async Task FinalizeCacheBody_DoNotCache_IfSizeTooBig()
await context.HttpContext.Response.WriteAsync(new string('0', 101));
- context.CachedResponse = new OutputCacheEntry() { Headers = new HeaderDictionary() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.CacheKey = "BaseKey";
context.CachedResponseValidFor = TimeSpan.FromSeconds(10);
@@ -939,7 +961,8 @@ public async Task EmptyCacheKey_IsNotCached()
// A response to HEAD should not include a body, but it may be present
await context.HttpContext.Response.WriteAsync("Hello");
- context.CachedResponse = new OutputCacheEntry { Headers = new() };
+ using var entry = new OutputCacheEntry(DateTimeOffset.UtcNow, StatusCodes.Status200OK);
+ context.CachedResponse = entry;
context.CacheKey = "";
context.CachedResponseValidFor = TimeSpan.FromSeconds(10);
diff --git a/src/Middleware/OutputCaching/test/TestUtils.cs b/src/Middleware/OutputCaching/test/TestUtils.cs
index b745adae97dc..324be5f91564 100644
--- a/src/Middleware/OutputCaching/test/TestUtils.cs
+++ b/src/Middleware/OutputCaching/test/TestUtils.cs
@@ -2,7 +2,6 @@
// The .NET Foundation licenses this file to you under the MIT license.
#nullable enable
-using System;
using System.Net.Http;
using System.Text;
using Microsoft.AspNetCore.Builder;
@@ -77,6 +76,16 @@ internal static Task TestRequestDelegateWrite(HttpContext context)
return Task.CompletedTask;
}
+ internal static async Task TestRequestDelegatePipeWriteAsync(HttpContext context)
+ {
+ var uniqueId = Guid.NewGuid().ToString();
+ if (TestRequestDelegate(context, uniqueId))
+ {
+ Encoding.UTF8.GetBytes(uniqueId, context.Response.BodyWriter);
+ await context.Response.BodyWriter.FlushAsync();
+ }
+ }
+
internal static IOutputCacheKeyProvider CreateTestKeyProvider()
{
return CreateTestKeyProvider(new OutputCacheOptions());
@@ -109,6 +118,11 @@ internal static IEnumerable CreateBuildersWithOutputCaching(
contextAction?.Invoke(context);
return TestRequestDelegateSendFileAsync(context);
},
+ context =>
+ {
+ contextAction?.Invoke(context);
+ return TestRequestDelegatePipeWriteAsync(context);
+ },
});
}
@@ -125,8 +139,9 @@ private static IEnumerable CreateBuildersWithOutputCaching(
{
requestDelegates = new RequestDelegate[]
{
- TestRequestDelegateWriteAsync,
- TestRequestDelegateWrite
+ TestRequestDelegateWriteAsync,
+ TestRequestDelegateWrite,
+ TestRequestDelegatePipeWriteAsync,
};
}