@@ -35,7 +35,7 @@ func BenchmarkNewChunkMergeIterator_CreateAndIterate(b *testing.B) {
 			scenario.duplicationFactor,
 			scenario.enc.String())
 
-		chunks := createChunks(b, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
+		chunks := createChunks(b, step, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
 
 		b.Run(name, func(b *testing.B) {
 			b.ReportAllocs()
@@ -55,10 +55,59 @@ func BenchmarkNewChunkMergeIterator_CreateAndIterate(b *testing.B) {
 	}
 }
 
+func BenchmarkNewChunkMergeIterator_Seek(b *testing.B) {
+	scenarios := []struct {
+		numChunks          int
+		numSamplesPerChunk int
+		duplicationFactor  int
+		seekStep           time.Duration
+		scrapeInterval     time.Duration
+		enc                promchunk.Encoding
+	}{
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 30, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 50, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 200, enc: promchunk.PrometheusXorChunk},
+
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 30, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 50, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 200, enc: promchunk.PrometheusXorChunk},
+	}
+
+	for _, scenario := range scenarios {
+		name := fmt.Sprintf("scrapeInterval %vs seekStep: %vs",
+			scenario.scrapeInterval.Seconds(),
+			scenario.seekStep.Seconds())
+
+		chunks := createChunks(b, scenario.scrapeInterval, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
+
+		b.Run(name, func(b *testing.B) {
+			b.ReportAllocs()
+
+			for n := 0; n < b.N; n++ {
+				it := NewChunkMergeIterator(chunks, 0, 0)
+				i := int64(0)
+				for it.Seek(i*scenario.seekStep.Milliseconds()) != chunkenc.ValNone {
+					i++
+				}
+			}
+		})
+	}
+}
+
 func TestSeekCorrectlyDealWithSinglePointChunks(t *testing.T) {
 	t.Parallel()
-	chunkOne := mkChunk(t, model.Time(1*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
-	chunkTwo := mkChunk(t, model.Time(10*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
+	chunkOne := mkChunk(t, step, model.Time(1*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
+	chunkTwo := mkChunk(t, step, model.Time(10*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
 	chunks := []chunk.Chunk{chunkOne, chunkTwo}
 
 	sut := NewChunkMergeIterator(chunks, 0, 0)
@@ -72,13 +121,13 @@ func TestSeekCorrectlyDealWithSinglePointChunks(t *testing.T) {
 	require.Equal(t, int64(1*time.Second/time.Millisecond), actual)
 }
 
-func createChunks(b *testing.B, numChunks, numSamplesPerChunk, duplicationFactor int, enc promchunk.Encoding) []chunk.Chunk {
+func createChunks(b *testing.B, step time.Duration, numChunks, numSamplesPerChunk, duplicationFactor int, enc promchunk.Encoding) []chunk.Chunk {
 	result := make([]chunk.Chunk, 0, numChunks)
 
 	for d := 0; d < duplicationFactor; d++ {
 		for c := 0; c < numChunks; c++ {
 			minTime := step * time.Duration(c*numSamplesPerChunk)
-			result = append(result, mkChunk(b, model.Time(minTime.Milliseconds()), numSamplesPerChunk, enc))
+			result = append(result, mkChunk(b, step, model.Time(minTime.Milliseconds()), numSamplesPerChunk, enc))
 		}
 	}
 