From 375fc86af7595e2ec83d46d5482172c1bb69abf0 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 5 Feb 2020 09:55:00 -0500 Subject: [PATCH] Fixes stats summary computation. (#1636) * Fixes stats summary computation. Signed-off-by: Cyril Tovena * Fixes division by zero Signed-off-by: Cyril Tovena --- pkg/logql/stats/context.go | 40 ++++++++++++++++----------------- pkg/logql/stats/context_test.go | 39 ++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 20 deletions(-) diff --git a/pkg/logql/stats/context.go b/pkg/logql/stats/context.go index de5aaf6bbe209..9371297ac5121 100644 --- a/pkg/logql/stats/context.go +++ b/pkg/logql/stats/context.go @@ -153,31 +153,29 @@ func Snapshot(ctx context.Context, execTime time.Duration) Result { res.Store.CompressedBytes = c.CompressedBytes res.Store.TotalDuplicates = c.TotalDuplicates } + res.ComputeSummary(execTime) + return res +} +// ComputeSummary calculates the summary based on store and ingester data. +func (r *Result) ComputeSummary(execTime time.Duration) { // calculate the summary - res.Summary.TotalBytesProcessed = res.Store.DecompressedBytes + res.Store.HeadChunkBytes + - res.Ingester.DecompressedBytes + res.Ingester.HeadChunkBytes - res.Summary.BytesProcessedPerSeconds = - int64(float64(res.Summary.TotalBytesProcessed) / - execTime.Seconds()) - res.Summary.TotalLinesProcessed = res.Store.DecompressedLines + res.Store.HeadChunkLines + - res.Ingester.DecompressedLines + res.Ingester.HeadChunkLines - res.Summary.LinesProcessedPerSeconds = - int64(float64(res.Summary.TotalLinesProcessed) / - execTime.Seconds()) - res.Summary.ExecTime = execTime.Seconds() - return res + r.Summary.TotalBytesProcessed = r.Store.DecompressedBytes + r.Store.HeadChunkBytes + + r.Ingester.DecompressedBytes + r.Ingester.HeadChunkBytes + r.Summary.TotalLinesProcessed = r.Store.DecompressedLines + r.Store.HeadChunkLines + + r.Ingester.DecompressedLines + r.Ingester.HeadChunkLines + r.Summary.ExecTime = execTime.Seconds() + if execTime != 0 { + r.Summary.BytesProcessedPerSeconds = + int64(float64(r.Summary.TotalBytesProcessed) / + execTime.Seconds()) + r.Summary.LinesProcessedPerSeconds = + int64(float64(r.Summary.TotalLinesProcessed) / + execTime.Seconds()) + } } func (r *Result) Merge(m Result) { - if r == nil { - return - } - r.Summary.BytesProcessedPerSeconds += m.Summary.BytesProcessedPerSeconds - r.Summary.LinesProcessedPerSeconds += m.Summary.LinesProcessedPerSeconds - r.Summary.TotalBytesProcessed += m.Summary.TotalBytesProcessed - r.Summary.TotalLinesProcessed += m.Summary.TotalLinesProcessed - r.Summary.ExecTime += m.Summary.ExecTime r.Store.TotalChunksRef += m.Store.TotalChunksRef r.Store.TotalChunksDownloaded += m.Store.TotalChunksDownloaded @@ -199,4 +197,6 @@ func (r *Result) Merge(m Result) { r.Ingester.DecompressedLines += m.Ingester.DecompressedLines r.Ingester.CompressedBytes += m.Ingester.CompressedBytes r.Ingester.TotalDuplicates += m.Ingester.TotalDuplicates + + r.ComputeSummary(time.Duration(int64((r.Summary.ExecTime + m.Summary.ExecTime) * float64(time.Second)))) } diff --git a/pkg/logql/stats/context_test.go b/pkg/logql/stats/context_test.go index 5873b3d77b204..b160544261f96 100644 --- a/pkg/logql/stats/context_test.go +++ b/pkg/logql/stats/context_test.go @@ -88,6 +88,9 @@ func fakeIngesterQuery(ctx context.Context) { func TestResult_Merge(t *testing.T) { var res Result + res.Merge(res) // testing zero. 
+	require.Equal(t, Result{}, res)
+
 	toMerge := Result{
 		Ingester: Ingester{
 			TotalChunksMatched: 200,
@@ -123,4 +126,40 @@ func TestResult_Merge(t *testing.T) {
 
 	res.Merge(toMerge)
 	require.Equal(t, toMerge, res)
+
+	// merge again
+	res.Merge(toMerge)
+	require.Equal(t, Result{
+		Ingester: Ingester{
+			TotalChunksMatched: 2 * 200,
+			TotalBatches:       2 * 50,
+			TotalLinesSent:     2 * 60,
+			HeadChunkBytes:     2 * 10,
+			HeadChunkLines:     2 * 20,
+			DecompressedBytes:  2 * 24,
+			DecompressedLines:  2 * 40,
+			CompressedBytes:    2 * 60,
+			TotalDuplicates:    2 * 2,
+			TotalReached:       2 * 2,
+		},
+		Store: Store{
+			TotalChunksRef:        2 * 50,
+			TotalChunksDownloaded: 2 * 60,
+			ChunksDownloadTime:    2 * time.Second.Seconds(),
+			HeadChunkBytes:        2 * 10,
+			HeadChunkLines:        2 * 20,
+			DecompressedBytes:     2 * 40,
+			DecompressedLines:     2 * 20,
+			CompressedBytes:       2 * 30,
+			TotalDuplicates:       2 * 10,
+		},
+		Summary: Summary{
+			ExecTime:                 2 * 2 * time.Second.Seconds(),
+			BytesProcessedPerSeconds: int64(42), // 2 requests at the same pace should give the same bytes/lines per sec
+			LinesProcessedPerSeconds: int64(50),
+			TotalBytesProcessed:      2 * int64(84),
+			TotalLinesProcessed:      2 * int64(100),
+		},
+	}, res)
+
 }
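Note on the guard above: ComputeSummary fills in the totals and ExecTime unconditionally but only derives the per-second rates when execTime is non-zero, and Merge now recomputes the summary from the merged store/ingester counters instead of adding rates together. Below is a minimal, self-contained sketch of that guard pattern; the rates helper and its main driver are illustrative only and not part of the Loki codebase.

package main

import (
	"fmt"
	"time"
)

// rates mirrors the guarded per-second computation introduced in
// ComputeSummary above: the totals are always available, but the derived
// rates are only computed when some execution time was recorded.
func rates(totalBytes, totalLines int64, execTime time.Duration) (bytesPerSec, linesPerSec int64) {
	if execTime == 0 {
		// Leaving both rates at zero avoids the division by zero below.
		return 0, 0
	}
	bytesPerSec = int64(float64(totalBytes) / execTime.Seconds())
	linesPerSec = int64(float64(totalLines) / execTime.Seconds())
	return bytesPerSec, linesPerSec
}

func main() {
	b, l := rates(84, 100, 2*time.Second)
	fmt.Println(b, l) // 42 50 -- the per-second rates asserted in TestResult_Merge
	b, l = rates(84, 100, 0)
	fmt.Println(b, l) // 0 0 -- no +Inf or NaN leaking into the stats
}

Without the guard, a zero execTime makes the float64 division return +Inf (or NaN when the totals are also zero), and converting such a value to int64 is implementation-defined in Go, so the per-second fields could carry arbitrary values; presumably this is the division by zero the second line of the commit message refers to.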