
Commit 8b5019b

MQE: include trace ID in panic message if memory consumption estimate becomes negative (#11713)
#### What this PR does

This PR adds the trace ID (if any) to the panic message used if the estimated memory consumption of a query becomes negative.

#### Which issue(s) this PR fixes or relates to

#11615, #11654

#### Checklist

- [x] Tests updated.
- [n/a] Documentation added.
- [covered by #10067] `CHANGELOG.md` updated - the order of entries should be `[CHANGE]`, `[FEATURE]`, `[ENHANCEMENT]`, `[BUGFIX]`.
- [n/a] [`about-versioning.md`](https://github.com/grafana/mimir/blob/main/docs/sources/mimir/configure/about-versioning.md) updated with experimental features.
1 parent (8d19548) · commit 8b5019b
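To support the change described above, the memory consumption tracker is now constructed with the query's context (see the test updates below), so the panic path can look up the trace ID when the estimate underflows. The following is a minimal, self-contained sketch of that idea only; it is not Mimir's actual implementation, and `traceIDFromContext`, `traceIDKey`, and the field names are hypothetical stand-ins for however the real tracker resolves the trace ID from the context.

```go
package main

import (
	"context"
	"fmt"
)

// traceIDKey is a hypothetical context key; the real code would read the ID
// from its tracing library (e.g. the span recorded in the request context).
type traceIDKey struct{}

func traceIDFromContext(ctx context.Context) string {
	id, _ := ctx.Value(traceIDKey{}).(string)
	return id
}

// memoryConsumptionTracker mimics the shape of the real tracker: it is created
// with the query context and keeps a running estimate of memory in use.
type memoryConsumptionTracker struct {
	ctx             context.Context
	currentEstimate int64
}

func (t *memoryConsumptionTracker) decreaseMemoryConsumption(bytes uint64) {
	t.currentEstimate -= int64(bytes)
	if t.currentEstimate < 0 {
		msg := fmt.Sprintf("estimated memory consumption of this query is negative (%d bytes)", t.currentEstimate)
		// The change in this commit: include the trace ID (if any) so the
		// offending query can be found in traces.
		if traceID := traceIDFromContext(t.ctx); traceID != "" {
			msg = fmt.Sprintf("%s, trace ID: %s", msg, traceID)
		}
		panic(msg)
	}
}

func main() {
	ctx := context.WithValue(context.Background(), traceIDKey{}, "4bf92f3577b34da6a3ce929d0e0e4736")
	tracker := &memoryConsumptionTracker{ctx: ctx}
	tracker.decreaseMemoryConsumption(64) // panics with a message that now carries the trace ID
}
```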

29 files changed: +178 −132 lines

pkg/ingester/client/streaming_test.go

Lines changed: 8 additions & 8 deletions
@@ -80,7 +80,7 @@ func TestSeriesChunksStreamReader_HappyPaths(t *testing.T) {
 cleanedUp := atomic.NewBool(false)
 cleanup := func() { cleanedUp.Store(true) }
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 reader := NewSeriesChunksStreamReader(ctx, mockClient, "ingester", 5, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 reader.StartBuffering()

@@ -123,7 +123,7 @@ func TestSeriesChunksStreamReader_AbortsWhenParentContextCancelled(t *testing.T)

 parentCtx, cancel := context.WithCancel(context.Background())
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(parentCtx, 0, nil, "")
 reader := NewSeriesChunksStreamReader(parentCtx, mockClient, "ingester", 3, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 cancel()
 reader.StartBuffering()
@@ -170,7 +170,7 @@ func TestSeriesChunksStreamReader_DoesNotAbortWhenStreamContextCancelled(t *test

 parentCtx := context.Background()
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(parentCtx, 0, nil, "")
 reader := NewSeriesChunksStreamReader(parentCtx, mockClient, "ingester", 3, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 cancel()
 reader.StartBuffering()
@@ -195,7 +195,7 @@ func TestSeriesChunksStreamReader_ReadingSeriesOutOfOrder(t *testing.T) {
 mockClient := &mockQueryStreamClient{ctx: ctx, batches: batches}
 cleanup := func() {}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 reader := NewSeriesChunksStreamReader(ctx, mockClient, "ingester", 1, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 reader.StartBuffering()

@@ -223,7 +223,7 @@ func TestSeriesChunksStreamReader_ReadingMoreSeriesThanAvailable(t *testing.T) {
 mockClient := &mockQueryStreamClient{ctx: ctx, batches: batches}
 cleanup := func() {}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 reader := NewSeriesChunksStreamReader(ctx, mockClient, "ingester", 1, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 reader.StartBuffering()

@@ -256,7 +256,7 @@ func TestSeriesChunksStreamReader_ReceivedFewerSeriesThanExpected(t *testing.T)
 cleanedUp := atomic.NewBool(false)
 cleanup := func() { cleanedUp.Store(true) }
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 reader := NewSeriesChunksStreamReader(ctx, mockClient, "ingester", 3, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 reader.StartBuffering()

@@ -306,7 +306,7 @@ func TestSeriesChunksStreamReader_ReceivedMoreSeriesThanExpected(t *testing.T) {
 cleanedUp := atomic.NewBool(false)
 cleanup := func() { cleanedUp.Store(true) }
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 reader := NewSeriesChunksStreamReader(ctx, mockClient, "ingester", 1, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 reader.StartBuffering()

@@ -380,7 +380,7 @@ func TestSeriesChunksStreamReader_QueryAndChunksLimits(t *testing.T) {
 })

 queryLimiter := limiter.NewQueryLimiter(0, testCase.maxChunkBytes, testCase.maxChunks, 0, queryMetrics)
-memoryTracker := limiter.NewMemoryConsumptionTracker(uint64(testCase.maxEstimatedMemory), rejectionCount, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, uint64(testCase.maxEstimatedMemory), rejectionCount, "")
 reader := NewSeriesChunksStreamReader(ctx, mockClient, "ingester", 1, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 reader.StartBuffering()

pkg/querier/block_streaming_test.go

Lines changed: 8 additions & 8 deletions
@@ -317,7 +317,7 @@ func TestStoreGatewayStreamReader_HappyPaths(t *testing.T) {
 ctx := context.Background()
 mockClient := &mockStoreGatewayQueryStreamClient{ctx: ctx, messages: testCase.messages}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 metrics := newBlocksStoreQueryableMetrics(prometheus.NewPedanticRegistry())
 reader := newStoreGatewayStreamReader(ctx, mockClient, 5, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 reader.StartBuffering()
@@ -389,7 +389,7 @@ func TestStoreGatewayStreamReader_AbortsWhenParentContextCancelled(t *testing.T)

 parentCtx, cancel := context.WithCancel(context.Background())
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(parentCtx, 0, nil, "")
 metrics := newBlocksStoreQueryableMetrics(prometheus.NewPedanticRegistry())
 reader := newStoreGatewayStreamReader(parentCtx, mockClient, 3, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 cancel()
@@ -418,10 +418,10 @@ func TestStoreGatewayStreamReader_DoesNotAbortWhenStreamContextCancelled(t *test
 const expectedChunksEstimate uint64 = 5
 mockClient := &mockStoreGatewayQueryStreamClient{ctx: streamCtx, messages: batchesToMessages(expectedChunksEstimate, batches...)}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
 metrics := newBlocksStoreQueryableMetrics(prometheus.NewPedanticRegistry())

 parentCtx := context.Background()
+memoryTracker := limiter.NewMemoryConsumptionTracker(parentCtx, 0, nil, "")
 reader := newStoreGatewayStreamReader(parentCtx, mockClient, 3, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 reader.StartBuffering()

@@ -444,7 +444,7 @@ func TestStoreGatewayStreamReader_ReadingSeriesOutOfOrder(t *testing.T) {
 ctx := context.Background()
 mockClient := &mockStoreGatewayQueryStreamClient{ctx: ctx, messages: batchesToMessages(3, batches...)}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 metrics := newBlocksStoreQueryableMetrics(prometheus.NewPedanticRegistry())
 reader := newStoreGatewayStreamReader(ctx, mockClient, 1, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 reader.StartBuffering()
@@ -463,7 +463,7 @@ func TestStoreGatewayStreamReader_ReadingMoreSeriesThanAvailable(t *testing.T) {
 ctx := context.Background()
 mockClient := &mockStoreGatewayQueryStreamClient{ctx: ctx, messages: batchesToMessages(3, batches...)}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 metrics := newBlocksStoreQueryableMetrics(prometheus.NewPedanticRegistry())
 reader := newStoreGatewayStreamReader(ctx, mockClient, 1, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 reader.StartBuffering()
@@ -493,7 +493,7 @@ func TestStoreGatewayStreamReader_ReceivedFewerSeriesThanExpected(t *testing.T)
 ctx := context.Background()
 mockClient := &mockStoreGatewayQueryStreamClient{ctx: ctx, messages: batchesToMessages(3, batches...)}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 metrics := newBlocksStoreQueryableMetrics(prometheus.NewPedanticRegistry())
 reader := newStoreGatewayStreamReader(ctx, mockClient, 3, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 reader.StartBuffering()
@@ -548,7 +548,7 @@ func TestStoreGatewayStreamReader_ReceivedMoreSeriesThanExpected(t *testing.T) {
 ctx := context.Background()
 mockClient := &mockStoreGatewayQueryStreamClient{ctx: ctx, messages: batchesToMessages(3, batches...)}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 metrics := newBlocksStoreQueryableMetrics(prometheus.NewPedanticRegistry())
 reader := newStoreGatewayStreamReader(ctx, mockClient, 1, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 reader.StartBuffering()
@@ -623,9 +623,9 @@ func TestStoreGatewayStreamReader_QueryAndChunksLimits(t *testing.T) {
 })
 queryMetrics := stats.NewQueryMetrics(registry)
 queryLimiter := limiter.NewQueryLimiter(0, testCase.maxChunkBytes, testCase.maxChunks, 0, queryMetrics)
-memoryTracker := limiter.NewMemoryConsumptionTracker(uint64(testCase.maxEstimatedMemory), rejectionCount, "")
 metrics := newBlocksStoreQueryableMetrics(registry)

+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, uint64(testCase.maxEstimatedMemory), rejectionCount, "")
 reader := newStoreGatewayStreamReader(ctx, mockClient, 1, queryLimiter, memoryTracker, &stats.Stats{}, metrics, log.NewNopLogger())
 reader.StartBuffering()

pkg/querier/distributor_queryable_streaming_test.go

Lines changed: 1 addition & 1 deletion
@@ -159,7 +159,7 @@ func createTestStreamReader(batches ...[]client.QueryStreamSeriesChunks) *client

 cleanup := func() {}
 queryLimiter := limiter.NewQueryLimiter(0, 0, 0, 0, nil)
-memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")

 reader := client.NewSeriesChunksStreamReader(ctx, mockClient, "ingester", seriesCount, queryLimiter, memoryTracker, cleanup, log.NewNopLogger())
 reader.StartBuffering()

pkg/streamingpromql/operators/aggregations/aggregation_test.go

Lines changed: 3 additions & 3 deletions
@@ -85,7 +85,7 @@ func TestAggregation_ReturnsGroupsFinishedFirstEarliest(t *testing.T) {

 for name, testCase := range testCases {
 t.Run(name, func(t *testing.T) {
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(context.Background(), 0, nil, "")
 aggregator := &Aggregation{
 Inner: &operators.TestOperator{Series: testCase.inputSeries, MemoryConsumptionTracker: memoryConsumptionTracker},
 Grouping: testCase.grouping,
@@ -352,7 +352,8 @@ func TestAggregations_ReturnIncompleteGroupsOnEarlyClose(t *testing.T) {

 for name, testCase := range testCases {
 t.Run(name, func(t *testing.T) {
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+ctx := context.Background()
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 timeRange := rangeQueryTimeRange

 if testCase.instant {
@@ -374,7 +375,6 @@
 o, err := testCase.createOperator(inner, timeRange, memoryConsumptionTracker)
 require.NoError(t, err)

-ctx := context.Background()
 series, err := o.SeriesMetadata(ctx)
 require.NoError(t, err)

pkg/streamingpromql/operators/aggregations/aggregations_safety_test.go

Lines changed: 2 additions & 1 deletion
@@ -3,6 +3,7 @@
 package aggregations

 import (
+"context"
 "testing"
 "time"

@@ -27,7 +28,7 @@ func TestAggregationGroupNativeHistogramSafety(t *testing.T) {

 for name, group := range groups {
 t.Run(name, func(t *testing.T) {
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(context.Background(), 0, nil, "")
 timeRange := types.NewRangeQueryTimeRange(timestamp.Time(0), timestamp.Time(4), time.Millisecond)

 // First series: all histograms should be nil-ed out after returning, as they're all retained for use.

pkg/streamingpromql/operators/aggregations/count_values_test.go

Lines changed: 1 addition & 1 deletion
@@ -208,7 +208,7 @@ func TestCountValues_GroupLabelling(t *testing.T) {

 for name, testCase := range testCases {
 t.Run(name, func(t *testing.T) {
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(context.Background(), 0, nil, "")
 floats, err := types.FPointSlicePool.Get(1, memoryConsumptionTracker)
 require.NoError(t, err)
 floats = append(floats, promql.FPoint{T: 0, F: 123})

pkg/streamingpromql/operators/aggregations/topkbottomk/instant_query_test.go

Lines changed: 1 addition & 1 deletion
@@ -363,7 +363,7 @@ func TestTopKBottomKInstantQuery_GroupingAndSorting(t *testing.T) {
 require.ElementsMatch(t, allExpectedOutputSeries, testCase.inputSeries, "invalid test case: list of input series and all output series do not match")

 timeRange := types.NewInstantQueryTimeRange(timestamp.Time(0))
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(context.Background(), 0, nil, "")

 data := make([]types.InstantVectorSeriesData, 0, len(testCase.inputSeries))
 for idx := range testCase.inputSeries {

pkg/streamingpromql/operators/aggregations/topkbottomk/range_query_test.go

Lines changed: 3 additions & 2 deletions
@@ -300,7 +300,8 @@ func TestTopKBottomKRangeQuery_GroupingAndSorting(t *testing.T) {
 for name, testCase := range testCases {
 t.Run(name, func(t *testing.T) {
 timeRange := types.NewRangeQueryTimeRange(timestamp.Time(0), timestamp.Time(0).Add(2*time.Minute), time.Minute)
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+ctx := context.Background()
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")

 o := New(
 &operators.TestOperator{Series: testCase.inputSeries, MemoryConsumptionTracker: memoryConsumptionTracker},
@@ -314,7 +315,7 @@
 posrange.PositionRange{Start: 0, End: 10},
 )

-outputSeries, err := o.SeriesMetadata(context.Background())
+outputSeries, err := o.SeriesMetadata(ctx)
 require.NoError(t, err)
 require.Equal(t, testutils.LabelsToSeriesMetadata(testCase.expectedOutputSeriesOrder), outputSeries)
 })

pkg/streamingpromql/operators/aggregations/topkbottomk/topk_bottomk_test.go

Lines changed: 2 additions & 2 deletions
@@ -50,7 +50,8 @@ func TestAggregations_ReturnIncompleteGroupsOnEarlyClose(t *testing.T) {
 t.Run(name, func(t *testing.T) {
 for name, readSeries := range map[string]bool{"read one series": true, "read no series": false} {
 t.Run(name, func(t *testing.T) {
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+ctx := context.Background()
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")

 inner := &operators.TestOperator{
 Series: inputSeries,
@@ -68,7 +69,6 @@ func TestAggregations_ReturnIncompleteGroupsOnEarlyClose(t *testing.T) {
 param := scalars.NewScalarConstant(6, timeRange, memoryConsumptionTracker, posrange.PositionRange{})
 o := New(inner, param, timeRange, nil, false, isTopK, memoryConsumptionTracker, annotations.New(), posrange.PositionRange{})

-ctx := context.Background()
 series, err := o.SeriesMetadata(ctx)
 require.NoError(t, err)

pkg/streamingpromql/operators/binops/and_unless_binary_operation_test.go

Lines changed: 4 additions & 4 deletions
@@ -309,14 +309,14 @@ func TestAndUnlessBinaryOperation_ClosesInnerOperatorsAsSoonAsPossible(t *testin
 require.Failf(t, "invalid test case", "expectRightSideClosedAfterOutputSeriesIndex %v is beyond end of expected output series %v", testCase.expectRightSideClosedAfterOutputSeriesIndex, testCase.expectedOutputSeries)
 }

+ctx := context.Background()
 timeRange := types.NewInstantQueryTimeRange(time.Now())
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 left := &operators.TestOperator{Series: testCase.leftSeries, Data: make([]types.InstantVectorSeriesData, len(testCase.leftSeries)), MemoryConsumptionTracker: memoryConsumptionTracker}
 right := &operators.TestOperator{Series: testCase.rightSeries, Data: make([]types.InstantVectorSeriesData, len(testCase.rightSeries)), MemoryConsumptionTracker: memoryConsumptionTracker}
 vectorMatching := parser.VectorMatching{On: true, MatchingLabels: []string{"group"}}
 o := NewAndUnlessBinaryOperation(left, right, vectorMatching, memoryConsumptionTracker, testCase.isUnless, timeRange, posrange.PositionRange{})

-ctx := context.Background()
 outputSeries, err := o.SeriesMetadata(ctx)
 require.NoError(t, err)

@@ -431,14 +431,14 @@ func TestAndUnlessBinaryOperation_ReleasesIntermediateStateIfClosedEarly(t *test
 t.Run(name, func(t *testing.T) {
 for name, testCase := range testCases {
 t.Run(name, func(t *testing.T) {
+ctx := context.Background()
 timeRange := types.NewInstantQueryTimeRange(time.Now())
-memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
+memoryConsumptionTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
 left := &operators.TestOperator{Series: testCase.leftSeries, Data: make([]types.InstantVectorSeriesData, len(testCase.leftSeries)), MemoryConsumptionTracker: memoryConsumptionTracker}
 right := &operators.TestOperator{Series: testCase.rightSeries, Data: make([]types.InstantVectorSeriesData, len(testCase.rightSeries)), MemoryConsumptionTracker: memoryConsumptionTracker}
 vectorMatching := parser.VectorMatching{On: true, MatchingLabels: []string{"group"}}
 o := NewAndUnlessBinaryOperation(left, right, vectorMatching, memoryConsumptionTracker, isUnless, timeRange, posrange.PositionRange{})

-ctx := context.Background()
 outputSeries, err := o.SeriesMetadata(ctx)
 require.NoError(t, err)

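Across every test file in this commit the mechanical change is the same: `limiter.NewMemoryConsumptionTracker` now takes the query context as its first argument, and call sites pass whatever context the test already has (`ctx`, `parentCtx`, or `context.Background()`). A hedged sketch of the call-site migration, assuming it sits inside the Mimir repository; the import path below is inferred from the package name used in these tests and may differ:

```go
package example

import (
	"context"

	// Assumed import path for the limiter package referenced in the diffs above.
	"github.com/grafana/mimir/pkg/util/limiter"
)

func newTrackerForTest(ctx context.Context) {
	// Before this commit:
	//   memoryTracker := limiter.NewMemoryConsumptionTracker(0, nil, "")
	// After this commit the context comes first, so a negative-estimate panic
	// can include the query's trace ID:
	memoryTracker := limiter.NewMemoryConsumptionTracker(ctx, 0, nil, "")
	_ = memoryTracker
}
```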