From fb9018ddaab87d2d2e51f62c3066392e56a6da56 Mon Sep 17 00:00:00 2001 From: Aliaksandr Valialkin Date: Thu, 30 May 2024 15:16:34 +0200 Subject: [PATCH] lib/logstorage: rename fields_min and fields_max stats functions to row_min and row_max --- docs/VictoriaLogs/CHANGELOG.md | 2 +- docs/VictoriaLogs/LogsQL.md | 36 +++++++-------- lib/logstorage/parser_test.go | 22 +++++----- lib/logstorage/pipe_stats.go | 12 ++--- .../{stats_fields_max.go => stats_row_max.go} | 44 +++++++++---------- ...elds_max_test.go => stats_row_max_test.go} | 38 ++++++++-------- .../{stats_fields_min.go => stats_row_min.go} | 44 +++++++++---------- ...elds_min_test.go => stats_row_min_test.go} | 38 ++++++++-------- 8 files changed, 118 insertions(+), 118 deletions(-) rename lib/logstorage/{stats_fields_max.go => stats_row_max.go} (77%) rename lib/logstorage/{stats_fields_max_test.go => stats_row_max_test.go} (79%) rename lib/logstorage/{stats_fields_min.go => stats_row_min.go} (77%) rename lib/logstorage/{stats_fields_min_test.go => stats_row_min_test.go} (79%) diff --git a/docs/VictoriaLogs/CHANGELOG.md b/docs/VictoriaLogs/CHANGELOG.md index 12ae5675e..13b5209a8 100644 --- a/docs/VictoriaLogs/CHANGELOG.md +++ b/docs/VictoriaLogs/CHANGELOG.md @@ -109,7 +109,7 @@ Released at 2024-05-22 * FEATURE: add ability to extract fields with [`extract` pipe](https://docs.victoriametrics.com/victorialogs/logsql/#extract-pipe) only if the given condition is met. See [these docs](https://docs.victoriametrics.com/victorialogs/logsql/#conditional-extract). * FEATURE: add ability to unpack JSON fields with [`unpack_json` pipe](https://docs.victoriametrics.com/victorialogs/logsql/#unpack_json-pipe) only if the given condition is met. See [these docs](https://docs.victoriametrics.com/victorialogs/logsql/#conditional-unpack_json). * FEATURE: add ability to unpack [logfmt](https://brandur.org/logfmt) fields with [`unpack_logfmt` pipe](https://docs.victoriametrics.com/victorialogs/logsql/#unpack_logfmt-pipe) only if the given condition is met. See [these docs](https://docs.victoriametrics.com/victorialogs/logsql/#conditional-unpack_logfmt). -* FEATURE: add [`fields_min`](https://docs.victoriametrics.com/victorialogs/logsql/#fields_min-stats) and [`fields_max`](https://docs.victoriametrics.com/victorialogs/logsql/#fields_max-stats) functions for [`stats` pipe](https://docs.victoriametrics.com/victorialogs/logsql/#stats-pipe), which allow returning all the [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) for the log entry with the minimum / maximum value at the given field. +* FEATURE: add [`row_min`](https://docs.victoriametrics.com/victorialogs/logsql/#row_min-stats) and [`row_max`](https://docs.victoriametrics.com/victorialogs/logsql/#row_max-stats) functions for [`stats` pipe](https://docs.victoriametrics.com/victorialogs/logsql/#stats-pipe), which allow returning all the [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) for the log entry with the minimum / maximum value at the given field. * FEATURE: add `/select/logsql/streams` HTTP endpoint for returning [streams](https://docs.victoriametrics.com/victorialogs/keyconcepts/#stream-fields) from results of the given query. See [these docs](https://docs.victoriametrics.com/victorialogs/querying/#querying-streams) for details. * FEATURE: add `/select/logsql/stream_field_names` HTTP endpoint for returning [stream](https://docs.victoriametrics.com/victorialogs/keyconcepts/#stream-fields) field names from results of the given query.
See [these docs](https://docs.victoriametrics.com/victorialogs/querying/#querying-stream-field-names) for details. * FEATURE: add `/select/logsql/stream_field_values` HTTP endpoint for returning [stream](https://docs.victoriametrics.com/victorialogs/keyconcepts/#stream-fields) field values for the given label from results of the given query. See [these docs](https://docs.victoriametrics.com/victorialogs/querying/#querying-stream-field-values) for details. diff --git a/docs/VictoriaLogs/LogsQL.md b/docs/VictoriaLogs/LogsQL.md index 6b785e977..8d970f41a 100644 --- a/docs/VictoriaLogs/LogsQL.md +++ b/docs/VictoriaLogs/LogsQL.md @@ -2299,8 +2299,8 @@ LogsQL supports the following functions for [`stats` pipe](#stats-pipe): - [`count`](#count-stats) returns the number of log entries. - [`count_empty`](#count_empty-stats) returns the number of logs with empty [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model). - [`count_uniq`](#count_uniq-stats) returns the number of unique non-empty values for the given [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model). -- [`fields_max`](#fields_max-stats) returns the [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) with the minimum value at the given field. -- [`fields_min`](#fields_min-stats) returns the [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) with the maximum value at the given field. +- [`row_max`](#row_max-stats) returns the [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) with the maximum value at the given field. +- [`row_min`](#row_min-stats) returns the [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) with the minimum value at the given field. - [`max`](#max-stats) returns the maximum value over the given numeric [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model). - [`median`](#median-stats) returns the [median](https://en.wikipedia.org/wiki/Median) value over the given numeric [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model). - [`min`](#min-stats) returns the minimum value over the given numeric [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model). @@ -2412,58 +2412,58 @@ See also: - [`uniq_values`](#uniq_values-stats) - [`count`](#count-stats) -### fields_max stats +### row_max stats -`fields_max(field)` [stats pipe function](#stats-pipe-functions) returns [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) +`row_max(field)` [stats pipe function](#stats-pipe-functions) returns [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) with the maximum value for the given `field`. Log entry is returned as JSON-encoded dictionary with all the fields from the original log. For example, the following query returns log entry with the maximum value for the `duration` [field](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) across logs for the last 5 minutes: ```logsql -_time:5m | stats fields_max(duration) as log_with_max_duration +_time:5m | stats row_max(duration) as log_with_max_duration ``` Fields from the returned values can be decoded with [`unpack_json`](#unpack_json-pipe) or [`extract`](#extract-pipe) pipes. -If only the specific fields are needed from the returned log entry, then they can be enumerated inside `fields_max(...)`.
+If only the specific fields are needed from the returned log entry, then they can be enumerated inside `row_max(...)`. For example, the following query returns only `_time`, `path` and `duration` fields from the log entry with the maximum `duration` over the last 5 minutes: ```logsql -_time:5m | stats fields_max(duration, _time, path, duration) as time_and_ip_with_max_duration +_time:5m | stats row_max(duration, _time, path, duration) as time_and_ip_with_max_duration ``` See also: - [`max`](#max-stats) -- [`fields_min`](#fields_min-stats) +- [`row_min`](#row_min-stats) -### fields_min stats +### row_min stats -`fields_min(field)` [stats pipe function](#stats-pipe-functions) returns [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) +`row_min(field)` [stats pipe function](#stats-pipe-functions) returns [log entry](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) with the minimum value for the given `field`. Log entry is returned as JSON-encoded dictionary with all the fields from the original log. For example, the following query returns log entry with the minimum value for the `duration` [field](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model) across logs for the last 5 minutes: ```logsql -_time:5m | stats fields_min(duration) as log_with_min_duration +_time:5m | stats row_min(duration) as log_with_min_duration ``` Fields from the returned values can be decoded with [`unpack_json`](#unpack_json-pipe) or [`extract`](#extract-pipe) pipes. -If only the specific fields are needed from the returned log entry, then they can be enumerated inside `fields_max(...)`. +If only the specific fields are needed from the returned log entry, then they can be enumerated inside `row_min(...)`. For example, the following query returns only `_time`, `path` and `duration` fields from the log entry with the minimum `duration` over the last 5 minutes: ```logsql -_time:5m | stats fields_min(duration, _time, path, duration) as time_and_ip_with_min_duration +_time:5m | stats row_min(duration, _time, path, duration) as time_and_ip_with_min_duration ``` See also: - [`min`](#min-stats) -- [`fields_max`](#fields_max-stats) +- [`row_max`](#row_max-stats) ### max stats @@ -2477,11 +2477,11 @@ over logs for the last 5 minutes: _time:5m | stats max(duration) max_duration ``` -[`fields_max`](#fields_max-stats) function can be used for obtaining other fields with the maximum duration. +[`row_max`](#row_max-stats) function can be used for obtaining other fields with the maximum duration. See also: -- [`fields_max`](#fields_max-stats) +- [`row_max`](#row_max-stats) - [`min`](#min-stats) - [`quantile`](#quantile-stats) - [`avg`](#avg-stats) @@ -2515,11 +2515,11 @@ over logs for the last 5 minutes: _time:5m | stats min(duration) min_duration ``` -[`fields_min`](#fields_min-stats) function can be used for obtaining other fields with the minimum duration. +[`row_min`](#row_min-stats) function can be used for obtaining other fields with the minimum duration.
See also: -- [`fields_min`](#fields_min-stats) +- [`row_min`](#row_min-stats) - [`max`](#max-stats) - [`quantile`](#quantile-stats) - [`avg`](#avg-stats) diff --git a/lib/logstorage/parser_test.go b/lib/logstorage/parser_test.go index 2ab4aaf1c..7391781ee 100644 --- a/lib/logstorage/parser_test.go +++ b/lib/logstorage/parser_test.go @@ -940,10 +940,10 @@ func TestParseQuerySuccess(t *testing.T) { f(`* | stats min(*) x`, `* | stats min(*) as x`) f(`* | stats min(foo,*,bar) x`, `* | stats min(*) as x`) - // stats pipe fields_min - f(`* | stats fields_Min(foo) bar`, `* | stats fields_min(foo) as bar`) - f(`* | fields_Min(foo)`, `* | stats fields_min(foo) as "fields_min(foo)"`) - f(`* | stats BY(x, y, ) fields_MIN(foo,bar,) bar`, `* | stats by (x, y) fields_min(foo, bar) as bar`) + // stats pipe row_min + f(`* | stats row_Min(foo) bar`, `* | stats row_min(foo) as bar`) + f(`* | row_Min(foo)`, `* | stats row_min(foo) as "row_min(foo)"`) + f(`* | stats BY(x, y, ) row_MIN(foo,bar,) bar`, `* | stats by (x, y) row_min(foo, bar) as bar`) // stats pipe avg f(`* | stats Avg(foo) bar`, `* | stats avg(foo) as bar`) @@ -1388,7 +1388,7 @@ func TestParseQueryFailure(t *testing.T) { f(`foo | stats min`) // invalid stats min - f(`foo | stats fields_min`) + f(`foo | stats row_min`) // invalid stats avg f(`foo | stats avg`) @@ -1627,12 +1627,12 @@ func TestQueryGetNeededColumns(t *testing.T) { f(`* | stats count_uniq() q`, `*`, ``) f(`* | stats count_uniq(*) q`, `*`, ``) f(`* | stats count_uniq(x) q`, `x`, ``) - f(`* | stats fields_max(a) q`, `*`, ``) - f(`* | stats fields_max(a, *) q`, `*`, ``) - f(`* | stats fields_max(a, x) q`, `a,x`, ``) - f(`* | stats fields_min(a) q`, `*`, ``) - f(`* | stats fields_min(a, *) q`, `*`, ``) - f(`* | stats fields_min(a, x) q`, `a,x`, ``) + f(`* | stats row_max(a) q`, `*`, ``) + f(`* | stats row_max(a, *) q`, `*`, ``) + f(`* | stats row_max(a, x) q`, `a,x`, ``) + f(`* | stats row_min(a) q`, `*`, ``) + f(`* | stats row_min(a, *) q`, `*`, ``) + f(`* | stats row_min(a, x) q`, `a,x`, ``) f(`* | stats min() q`, `*`, ``) f(`* | stats min(*) q`, `*`, ``) f(`* | stats min(x) q`, `x`, ``) diff --git a/lib/logstorage/pipe_stats.go b/lib/logstorage/pipe_stats.go index b73744fbb..7f9248d8f 100644 --- a/lib/logstorage/pipe_stats.go +++ b/lib/logstorage/pipe_stats.go @@ -631,16 +631,16 @@ func parseStatsFunc(lex *lexer) (statsFunc, error) { return nil, fmt.Errorf("cannot parse 'count_uniq' func: %w", err) } return sus, nil - case lex.isKeyword("fields_max"): - sms, err := parseStatsFieldsMax(lex) + case lex.isKeyword("row_max"): + sms, err := parseStatsRowMax(lex) if err != nil { - return nil, fmt.Errorf("cannot parse 'fields_max' func: %w", err) + return nil, fmt.Errorf("cannot parse 'row_max' func: %w", err) } return sms, nil - case lex.isKeyword("fields_min"): - sms, err := parseStatsFieldsMin(lex) + case lex.isKeyword("row_min"): + sms, err := parseStatsRowMin(lex) if err != nil { - return nil, fmt.Errorf("cannot parse 'fields_min' func: %w", err) + return nil, fmt.Errorf("cannot parse 'row_min' func: %w", err) } return sms, nil case lex.isKeyword("max"): diff --git a/lib/logstorage/stats_fields_max.go b/lib/logstorage/stats_row_max.go similarity index 77% rename from lib/logstorage/stats_fields_max.go rename to lib/logstorage/stats_row_max.go index 599cf48b0..8f53f0f65 100644 --- a/lib/logstorage/stats_fields_max.go +++ b/lib/logstorage/stats_row_max.go @@ -11,14 +11,14 @@ import ( "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger" ) -type statsFieldsMax struct { +type
statsRowMax struct { srcField string fetchFields []string } -func (sm *statsFieldsMax) String() string { - s := "fields_max(" + quoteTokenIfNeeded(sm.srcField) +func (sm *statsRowMax) String() string { + s := "row_max(" + quoteTokenIfNeeded(sm.srcField) if len(sm.fetchFields) > 0 { s += ", " + fieldNamesString(sm.fetchFields) } @@ -26,7 +26,7 @@ func (sm *statsFieldsMax) String() string { return s } -func (sm *statsFieldsMax) updateNeededFields(neededFields fieldsSet) { +func (sm *statsRowMax) updateNeededFields(neededFields fieldsSet) { if len(sm.fetchFields) == 0 { neededFields.add("*") } else { @@ -35,22 +35,22 @@ func (sm *statsFieldsMax) updateNeededFields(neededFields fieldsSet) { neededFields.add(sm.srcField) } -func (sm *statsFieldsMax) newStatsProcessor() (statsProcessor, int) { - smp := &statsFieldsMaxProcessor{ +func (sm *statsRowMax) newStatsProcessor() (statsProcessor, int) { + smp := &statsRowMaxProcessor{ sm: sm, } return smp, int(unsafe.Sizeof(*smp)) } -type statsFieldsMaxProcessor struct { - sm *statsFieldsMax +type statsRowMaxProcessor struct { + sm *statsRowMax max string fields []Field } -func (smp *statsFieldsMaxProcessor) updateStatsForAllRows(br *blockResult) int { +func (smp *statsRowMaxProcessor) updateStatsForAllRows(br *blockResult) int { stateSizeIncrease := 0 c := br.getColumnByName(smp.sm.srcField) @@ -114,7 +114,7 @@ func (smp *statsFieldsMaxProcessor) updateStatsForAllRows(br *blockResult) int { return stateSizeIncrease } -func (smp *statsFieldsMaxProcessor) updateStatsForRow(br *blockResult, rowIdx int) int { +func (smp *statsRowMaxProcessor) updateStatsForRow(br *blockResult, rowIdx int) int { stateSizeIncrease := 0 c := br.getColumnByName(smp.sm.srcField) @@ -138,27 +138,27 @@ func (smp *statsFieldsMaxProcessor) updateStatsForRow(br *blockResult, rowIdx in return stateSizeIncrease } -func (smp *statsFieldsMaxProcessor) mergeState(sfp statsProcessor) { - src := sfp.(*statsFieldsMaxProcessor) +func (smp *statsRowMaxProcessor) mergeState(sfp statsProcessor) { + src := sfp.(*statsRowMaxProcessor) if smp.needUpdateStateString(src.max) { smp.max = src.max smp.fields = src.fields } } -func (smp *statsFieldsMaxProcessor) needUpdateStateBytes(b []byte) bool { +func (smp *statsRowMaxProcessor) needUpdateStateBytes(b []byte) bool { v := bytesutil.ToUnsafeString(b) return smp.needUpdateStateString(v) } -func (smp *statsFieldsMaxProcessor) needUpdateStateString(v string) bool { +func (smp *statsRowMaxProcessor) needUpdateStateString(v string) bool { if v == "" { return false } return smp.max == "" || lessString(smp.max, v) } -func (smp *statsFieldsMaxProcessor) updateState(v string, br *blockResult, rowIdx int) int { +func (smp *statsRowMaxProcessor) updateState(v string, br *blockResult, rowIdx int) int { stateSizeIncrease := 0 if !smp.needUpdateStateString(v) { @@ -204,7 +204,7 @@ func (smp *statsFieldsMaxProcessor) updateState(v string, br *blockResult, rowId return stateSizeIncrease } -func (smp *statsFieldsMaxProcessor) finalizeStats() string { +func (smp *statsRowMaxProcessor) finalizeStats() string { bb := bbPool.Get() bb.B = marshalFieldsToJSON(bb.B, smp.fields) result := string(bb.B) @@ -213,18 +213,18 @@ func (smp *statsFieldsMaxProcessor) finalizeStats() string { return result } -func parseStatsFieldsMax(lex *lexer) (*statsFieldsMax, error) { - if !lex.isKeyword("fields_max") { - return nil, fmt.Errorf("unexpected func; got %q; want 'fields_max'", lex.token) +func parseStatsRowMax(lex *lexer) (*statsRowMax, error) { + if !lex.isKeyword("row_max") { + return 
nil, fmt.Errorf("unexpected func; got %q; want 'row_max'", lex.token) } lex.nextToken() fields, err := parseFieldNamesInParens(lex) if err != nil { - return nil, fmt.Errorf("cannot parse 'fields_max' args: %w", err) + return nil, fmt.Errorf("cannot parse 'row_max' args: %w", err) } if len(fields) == 0 { - return nil, fmt.Errorf("missing first arg for 'fields_max' func - source field") + return nil, fmt.Errorf("missing first arg for 'row_max' func - source field") } srcField := fields[0] @@ -233,7 +233,7 @@ func parseStatsFieldsMax(lex *lexer) (*statsFieldsMax, error) { fetchFields = nil } - sm := &statsFieldsMax{ + sm := &statsRowMax{ srcField: srcField, fetchFields: fetchFields, } diff --git a/lib/logstorage/stats_fields_max_test.go b/lib/logstorage/stats_row_max_test.go similarity index 79% rename from lib/logstorage/stats_fields_max_test.go rename to lib/logstorage/stats_row_max_test.go index de4e6e5e5..39fd82564 100644 --- a/lib/logstorage/stats_fields_max_test.go +++ b/lib/logstorage/stats_row_max_test.go @@ -4,35 +4,35 @@ import ( "testing" ) -func TestParseStatsFieldsMaxSuccess(t *testing.T) { +func TestParseStatsRowMaxSuccess(t *testing.T) { f := func(pipeStr string) { t.Helper() expectParseStatsFuncSuccess(t, pipeStr) } - f(`fields_max(foo)`) - f(`fields_max(foo, bar)`) - f(`fields_max(foo, bar, baz)`) + f(`row_max(foo)`) + f(`row_max(foo, bar)`) + f(`row_max(foo, bar, baz)`) } -func TestParseStatsFieldsMaxFailure(t *testing.T) { +func TestParseStatsRowMaxFailure(t *testing.T) { f := func(pipeStr string) { t.Helper() expectParseStatsFuncFailure(t, pipeStr) } - f(`fields_max`) - f(`fields_max()`) - f(`fields_max(x) bar`) + f(`row_max`) + f(`row_max()`) + f(`row_max(x) bar`) } -func TestStatsFieldsMax(t *testing.T) { +func TestStatsRowMax(t *testing.T) { f := func(pipeStr string, rows, rowsExpected [][]Field) { t.Helper() expectPipeResults(t, pipeStr, rows, rowsExpected) } - f("stats fields_max(a) as x", [][]Field{ + f("stats row_max(a) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -52,7 +52,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats fields_max(foo) as x", [][]Field{ + f("stats row_max(foo) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -72,7 +72,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats fields_max(b, a) as x", [][]Field{ + f("stats row_max(b, a) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -93,7 +93,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats fields_max(b, a, x, b) as x", [][]Field{ + f("stats row_max(b, a, x, b) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -114,7 +114,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats fields_max(a) if (b:*) as x", [][]Field{ + f("stats row_max(a) if (b:*) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -134,7 +134,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats by (b) fields_max(a) if (b:*) as x", [][]Field{ + f("stats by (b) row_max(a) if (b:*) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -160,7 +160,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats by (a) fields_max(b) as x", [][]Field{ + f("stats by (a) row_max(b) as x", [][]Field{ { {"_msg", `abc`}, {"a", `1`}, @@ -189,7 +189,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats by (a) fields_max(c) as x", [][]Field{ + f("stats by (a) row_max(c) as x", [][]Field{ { {"_msg", `abc`}, {"a", `1`}, @@ -218,7 +218,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats by (a) fields_max(b, c) as x", [][]Field{ + f("stats by (a) row_max(b, c) as x", 
[][]Field{ { {"_msg", `abc`}, {"a", `1`}, @@ -250,7 +250,7 @@ func TestStatsFieldsMax(t *testing.T) { }, }) - f("stats by (a, b) fields_max(c) as x", [][]Field{ + f("stats by (a, b) row_max(c) as x", [][]Field{ { {"_msg", `abc`}, {"a", `1`}, diff --git a/lib/logstorage/stats_fields_min.go b/lib/logstorage/stats_row_min.go similarity index 77% rename from lib/logstorage/stats_fields_min.go rename to lib/logstorage/stats_row_min.go index e57b466e8..9aa69681a 100644 --- a/lib/logstorage/stats_fields_min.go +++ b/lib/logstorage/stats_row_min.go @@ -11,14 +11,14 @@ import ( "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger" ) -type statsFieldsMin struct { +type statsRowMin struct { srcField string fetchFields []string } -func (sm *statsFieldsMin) String() string { - s := "fields_min(" + quoteTokenIfNeeded(sm.srcField) +func (sm *statsRowMin) String() string { + s := "row_min(" + quoteTokenIfNeeded(sm.srcField) if len(sm.fetchFields) > 0 { s += ", " + fieldNamesString(sm.fetchFields) } @@ -26,7 +26,7 @@ func (sm *statsFieldsMin) String() string { return s } -func (sm *statsFieldsMin) updateNeededFields(neededFields fieldsSet) { +func (sm *statsRowMin) updateNeededFields(neededFields fieldsSet) { if len(sm.fetchFields) == 0 { neededFields.add("*") } else { @@ -35,22 +35,22 @@ func (sm *statsFieldsMin) updateNeededFields(neededFields fieldsSet) { neededFields.add(sm.srcField) } -func (sm *statsFieldsMin) newStatsProcessor() (statsProcessor, int) { - smp := &statsFieldsMinProcessor{ +func (sm *statsRowMin) newStatsProcessor() (statsProcessor, int) { + smp := &statsRowMinProcessor{ sm: sm, } return smp, int(unsafe.Sizeof(*smp)) } -type statsFieldsMinProcessor struct { - sm *statsFieldsMin +type statsRowMinProcessor struct { + sm *statsRowMin min string fields []Field } -func (smp *statsFieldsMinProcessor) updateStatsForAllRows(br *blockResult) int { +func (smp *statsRowMinProcessor) updateStatsForAllRows(br *blockResult) int { stateSizeIncrease := 0 c := br.getColumnByName(smp.sm.srcField) @@ -114,7 +114,7 @@ func (smp *statsFieldsMinProcessor) updateStatsForAllRows(br *blockResult) int { return stateSizeIncrease } -func (smp *statsFieldsMinProcessor) updateStatsForRow(br *blockResult, rowIdx int) int { +func (smp *statsRowMinProcessor) updateStatsForRow(br *blockResult, rowIdx int) int { stateSizeIncrease := 0 c := br.getColumnByName(smp.sm.srcField) @@ -138,27 +138,27 @@ func (smp *statsFieldsMinProcessor) updateStatsForRow(br *blockResult, rowIdx in return stateSizeIncrease } -func (smp *statsFieldsMinProcessor) mergeState(sfp statsProcessor) { - src := sfp.(*statsFieldsMinProcessor) +func (smp *statsRowMinProcessor) mergeState(sfp statsProcessor) { + src := sfp.(*statsRowMinProcessor) if smp.needUpdateStateString(src.min) { smp.min = src.min smp.fields = src.fields } } -func (smp *statsFieldsMinProcessor) needUpdateStateBytes(b []byte) bool { +func (smp *statsRowMinProcessor) needUpdateStateBytes(b []byte) bool { v := bytesutil.ToUnsafeString(b) return smp.needUpdateStateString(v) } -func (smp *statsFieldsMinProcessor) needUpdateStateString(v string) bool { +func (smp *statsRowMinProcessor) needUpdateStateString(v string) bool { if v == "" { return false } return smp.min == "" || lessString(v, smp.min) } -func (smp *statsFieldsMinProcessor) updateState(v string, br *blockResult, rowIdx int) int { +func (smp *statsRowMinProcessor) updateState(v string, br *blockResult, rowIdx int) int { stateSizeIncrease := 0 if !smp.needUpdateStateString(v) { @@ -204,7 +204,7 @@ func (smp 
*statsFieldsMinProcessor) updateState(v string, br *blockResult, rowId return stateSizeIncrease } -func (smp *statsFieldsMinProcessor) finalizeStats() string { +func (smp *statsRowMinProcessor) finalizeStats() string { bb := bbPool.Get() bb.B = marshalFieldsToJSON(bb.B, smp.fields) result := string(bb.B) @@ -213,18 +213,18 @@ func (smp *statsFieldsMinProcessor) finalizeStats() string { return result } -func parseStatsFieldsMin(lex *lexer) (*statsFieldsMin, error) { - if !lex.isKeyword("fields_min") { - return nil, fmt.Errorf("unexpected func; got %q; want 'fields_min'", lex.token) +func parseStatsRowMin(lex *lexer) (*statsRowMin, error) { + if !lex.isKeyword("row_min") { + return nil, fmt.Errorf("unexpected func; got %q; want 'row_min'", lex.token) } lex.nextToken() fields, err := parseFieldNamesInParens(lex) if err != nil { - return nil, fmt.Errorf("cannot parse 'fields_min' args: %w", err) + return nil, fmt.Errorf("cannot parse 'row_min' args: %w", err) } if len(fields) == 0 { - return nil, fmt.Errorf("missing first arg for 'fields_min' func - source field") + return nil, fmt.Errorf("missing first arg for 'row_min' func - source field") } srcField := fields[0] @@ -233,7 +233,7 @@ func parseStatsFieldsMin(lex *lexer) (*statsFieldsMin, error) { fetchFields = nil } - sm := &statsFieldsMin{ + sm := &statsRowMin{ srcField: srcField, fetchFields: fetchFields, } diff --git a/lib/logstorage/stats_fields_min_test.go b/lib/logstorage/stats_row_min_test.go similarity index 79% rename from lib/logstorage/stats_fields_min_test.go rename to lib/logstorage/stats_row_min_test.go index dbbdb8d27..67225c7f6 100644 --- a/lib/logstorage/stats_fields_min_test.go +++ b/lib/logstorage/stats_row_min_test.go @@ -4,35 +4,35 @@ import ( "testing" ) -func TestParseStatsFieldsMinSuccess(t *testing.T) { +func TestParseStatsRowMinSuccess(t *testing.T) { f := func(pipeStr string) { t.Helper() expectParseStatsFuncSuccess(t, pipeStr) } - f(`fields_min(foo)`) - f(`fields_min(foo, bar)`) - f(`fields_min(foo, bar, baz)`) + f(`row_min(foo)`) + f(`row_min(foo, bar)`) + f(`row_min(foo, bar, baz)`) } -func TestParseStatsFieldsMinFailure(t *testing.T) { +func TestParseStatsRowMinFailure(t *testing.T) { f := func(pipeStr string) { t.Helper() expectParseStatsFuncFailure(t, pipeStr) } - f(`fields_min`) - f(`fields_min()`) - f(`fields_min(x) bar`) + f(`row_min`) + f(`row_min()`) + f(`row_min(x) bar`) } -func TestStatsFieldsMin(t *testing.T) { +func TestStatsRowMin(t *testing.T) { f := func(pipeStr string, rows, rowsExpected [][]Field) { t.Helper() expectPipeResults(t, pipeStr, rows, rowsExpected) } - f("stats fields_min(a) as x", [][]Field{ + f("stats row_min(a) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -52,7 +52,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats fields_min(foo) as x", [][]Field{ + f("stats row_min(foo) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -72,7 +72,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats fields_min(b, a) as x", [][]Field{ + f("stats row_min(b, a) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -93,7 +93,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats fields_min(b, a, x, b) as x", [][]Field{ + f("stats row_min(b, a, x, b) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -114,7 +114,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats fields_min(a) if (b:*) as x", [][]Field{ + f("stats row_min(a) if (b:*) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -134,7 +134,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats 
by (b) fields_min(a) if (b:*) as x", [][]Field{ + f("stats by (b) row_min(a) if (b:*) as x", [][]Field{ { {"_msg", `abc`}, {"a", `2`}, @@ -160,7 +160,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats by (a) fields_min(b) as x", [][]Field{ + f("stats by (a) row_min(b) as x", [][]Field{ { {"_msg", `abc`}, {"a", `1`}, @@ -189,7 +189,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats by (a) fields_min(c) as x", [][]Field{ + f("stats by (a) row_min(c) as x", [][]Field{ { {"_msg", `abc`}, {"a", `1`}, @@ -218,7 +218,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats by (a) fields_min(b, c) as x", [][]Field{ + f("stats by (a) row_min(b, c) as x", [][]Field{ { {"_msg", `abc`}, {"a", `1`}, @@ -249,7 +249,7 @@ func TestStatsFieldsMin(t *testing.T) { }, }) - f("stats by (a, b) fields_min(c) as x", [][]Field{ + f("stats by (a, b) row_min(c) as x", [][]Field{ { {"_msg", `abc`}, {"a", `1`},