mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git

Commit: 9c4423c1db
Parent: 6b63f65baf
Commit message: wip

4 changed files with 35 additions and 35 deletions
@@ -1327,7 +1327,7 @@ LogsQL supports the following functions for [`stats` pipe](#stats-pipe):
 - [`min`](#min-stats) calculates the minumum value over the given numeric [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model).
 - [`sum`](#sum-stats) calculates the sum for the given numeric [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model).
 - [`uniq`](#uniq-stats) calculates the number of unique non-empty values for the given [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model).
-- [`uniq_array`](#uniq_array-stats) returns unique non-empty values for the given [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model).
+- [`uniq_values`](#uniq_values-stats) returns unique non-empty values for the given [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model).

 ### avg stats

@@ -1475,12 +1475,12 @@ _time:5m | stats uniq(host, path) unique_host_path_pairs

 See also:

-- [`uniq_array`](#uniq_array-stats)
+- [`uniq_values`](#uniq_values-stats)
 - [`count`](#count-stats)

-### uniq_array stats
+### uniq_values stats

-`uniq_array(field1, ..., fieldN)` [stats pipe](#stats-pipe) returns the unique non-empty values across
+`uniq_values(field1, ..., fieldN)` [stats pipe](#stats-pipe) returns the unique non-empty values across
 the mentioned [log fields](https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model).
 The returned values are sorted and encoded in JSON array.

@@ -1488,7 +1488,7 @@ For example, the following query returns unique non-empty values for the `ip` [f
 over logs for the last 5 minutes:

 ```logsql
-_time:5m | stats uniq_array(ip) unique_ips
+_time:5m | stats uniq_values(ip) unique_ips
 ```

 See also:
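The documentation hunks above pin down the behavior behind the rename: `uniq_values` ignores empty values, deduplicates the rest across the listed fields, and returns them as a sorted JSON array. A minimal Go sketch of that contract (a standalone `uniqValues` helper written for illustration, not the VictoriaLogs implementation; the real sort order for numeric values may differ):

```go
package main

import (
	"encoding/json"
	"fmt"
	"sort"
)

// uniqValues mirrors the documented uniq_values contract: ignore empty
// values, deduplicate the rest, sort them, and encode a JSON array.
func uniqValues(values []string) string {
	seen := make(map[string]struct{})
	for _, v := range values {
		if v == "" {
			continue // empty field values do not count
		}
		seen[v] = struct{}{}
	}
	items := make([]string, 0, len(seen))
	for v := range seen {
		items = append(items, v)
	}
	sort.Strings(items) // lexicographic here; the real ordering may differ
	b, _ := json.Marshal(items)
	return string(b)
}

func main() {
	ips := []string{"10.0.0.2", "", "10.0.0.1", "10.0.0.2"}
	fmt.Println(uniqValues(ips)) // ["10.0.0.1","10.0.0.2"]
}
```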
@@ -911,12 +911,12 @@ func TestParseQuerySuccess(t *testing.T) {
 f(`* | stats by(x) uniq() z`, `* | stats by (x) uniq(*) as z`)
 f(`* | stats by(x) uniq(a,*,b) z`, `* | stats by (x) uniq(*) as z`)

-// stats pipe uniq_array
-f(`* | stats uniq_array(foo) bar`, `* | stats uniq_array(foo) as bar`)
-f(`* | stats by(x, y) uniq_array(foo, bar) as baz`, `* | stats by (x, y) uniq_array(foo, bar) as baz`)
-f(`* | stats by(x) uniq_array(*) y`, `* | stats by (x) uniq_array(*) as y`)
-f(`* | stats by(x) uniq_array() y`, `* | stats by (x) uniq_array(*) as y`)
-f(`* | stats by(x) uniq_array(a,*,b) y`, `* | stats by (x) uniq_array(*) as y`)
+// stats pipe uniq_values
+f(`* | stats uniq_values(foo) bar`, `* | stats uniq_values(foo) as bar`)
+f(`* | stats by(x, y) uniq_values(foo, bar) as baz`, `* | stats by (x, y) uniq_values(foo, bar) as baz`)
+f(`* | stats by(x) uniq_values(*) y`, `* | stats by (x) uniq_values(*) as y`)
+f(`* | stats by(x) uniq_values() y`, `* | stats by (x) uniq_values(*) as y`)
+f(`* | stats by(x) uniq_values(a,*,b) y`, `* | stats by (x) uniq_values(*) as y`)

 // stats pipe multiple funcs
 f(`* | stats count() "foo.bar:baz", uniq(a) bar`, `* | stats count(*) as "foo.bar:baz", uniq(a) as bar`)
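The success cases above also carry the canonicalization rules the renamed function inherits: a missing `as` is added, and an empty argument list or any list containing `*` collapses to `*`. A small sketch of that collapse rule (hypothetical `normalizeFields` helper for illustration, not the parser's code):

```go
package main

import (
	"fmt"
	"slices"
)

// normalizeFields collapses a stats-func field list the way the test
// expectations show: no fields, or a "*" among them, means all fields.
func normalizeFields(fields []string) []string {
	if len(fields) == 0 || slices.Contains(fields, "*") {
		return []string{"*"}
	}
	return fields
}

func main() {
	fmt.Println(normalizeFields(nil))                     // [*]
	fmt.Println(normalizeFields([]string{"a", "*", "b"})) // [*]
	fmt.Println(normalizeFields([]string{"foo", "bar"}))  // [foo bar]
}
```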
@@ -1228,9 +1228,9 @@ func TestParseQueryFailure(t *testing.T) {
 f(`foo | stats uniq`)
 f(`foo | stats uniq()`)

-// invalid stats uniq_array
-f(`foo | stats uniq_array`)
-f(`foo | stats uniq_array()`)
+// invalid stats uniq_values
+f(`foo | stats uniq_values`)
+f(`foo | stats uniq_values()`)

 // invalid stats grouping fields
 f(`foo | stats by(foo:bar) count() baz`)
@@ -506,10 +506,10 @@ func parseStatsFunc(lex *lexer) (statsFunc, string, error) {
 return nil, "", fmt.Errorf("cannot parse 'avg' func: %w", err)
 }
 sf = sas
-case lex.isKeyword("uniq_array"):
-sus, err := parseStatsUniqArray(lex)
+case lex.isKeyword("uniq_values"):
+sus, err := parseStatsUniqValues(lex)
 if err != nil {
-return nil, "", fmt.Errorf("cannot parse 'uniq_array' func: %w", err)
+return nil, "", fmt.Errorf("cannot parse 'uniq_values' func: %w", err)
 }
 sf = sus
 default:
@@ -10,21 +10,21 @@ import (
 "github.com/VictoriaMetrics/VictoriaMetrics/lib/bytesutil"
 )

-type statsUniqArray struct {
+type statsUniqValues struct {
 fields []string
 containsStar bool
 }

-func (su *statsUniqArray) String() string {
-return "uniq_array(" + fieldNamesString(su.fields) + ")"
+func (su *statsUniqValues) String() string {
+return "uniq_values(" + fieldNamesString(su.fields) + ")"
 }

-func (su *statsUniqArray) neededFields() []string {
+func (su *statsUniqValues) neededFields() []string {
 return su.fields
 }

-func (su *statsUniqArray) newStatsProcessor() (statsProcessor, int) {
-sup := &statsUniqArrayProcessor{
+func (su *statsUniqValues) newStatsProcessor() (statsProcessor, int) {
+sup := &statsUniqValuesProcessor{
 su: su,

 m: make(map[string]struct{}),
@@ -32,13 +32,13 @@ func (su *statsUniqArray) newStatsProcessor() (statsProcessor, int) {
 return sup, int(unsafe.Sizeof(*sup))
 }

-type statsUniqArrayProcessor struct {
-su *statsUniqArray
+type statsUniqValuesProcessor struct {
+su *statsUniqValues

 m map[string]struct{}
 }

-func (sup *statsUniqArrayProcessor) updateStatsForAllRows(br *blockResult) int {
+func (sup *statsUniqValuesProcessor) updateStatsForAllRows(br *blockResult) int {
 stateSizeIncrease := 0
 if sup.su.containsStar {
 columns := br.getColumns()
@@ -54,7 +54,7 @@ func (sup *statsUniqArrayProcessor) updateStatsForAllRows(br *blockResult) int {
 return stateSizeIncrease
 }

-func (sup *statsUniqArrayProcessor) updateStatsForAllRowsColumn(c *blockResultColumn, br *blockResult) int {
+func (sup *statsUniqValuesProcessor) updateStatsForAllRowsColumn(c *blockResultColumn, br *blockResult) int {
 m := sup.m
 stateSizeIncrease := 0
 if c.isConst {
@@ -107,7 +107,7 @@ func (sup *statsUniqArrayProcessor) updateStatsForAllRowsColumn(c *blockResultCo
 return stateSizeIncrease
 }

-func (sup *statsUniqArrayProcessor) updateStatsForRow(br *blockResult, rowIdx int) int {
+func (sup *statsUniqValuesProcessor) updateStatsForRow(br *blockResult, rowIdx int) int {
 stateSizeIncrease := 0
 if sup.su.containsStar {
 columns := br.getColumns()
@@ -123,7 +123,7 @@ func (sup *statsUniqArrayProcessor) updateStatsForRow(br *blockResult, rowIdx in
 return stateSizeIncrease
 }

-func (sup *statsUniqArrayProcessor) updateStatsForRowColumn(c *blockResultColumn, br *blockResult, rowIdx int) int {
+func (sup *statsUniqValuesProcessor) updateStatsForRowColumn(c *blockResultColumn, br *blockResult, rowIdx int) int {
 m := sup.m
 stateSizeIncrease := 0
 if c.isConst {
@@ -170,8 +170,8 @@ func (sup *statsUniqArrayProcessor) updateStatsForRowColumn(c *blockResultColumn
 return stateSizeIncrease
 }

-func (sup *statsUniqArrayProcessor) mergeState(sfp statsProcessor) {
-src := sfp.(*statsUniqArrayProcessor)
+func (sup *statsUniqValuesProcessor) mergeState(sfp statsProcessor) {
+src := sfp.(*statsUniqValuesProcessor)
 m := sup.m
 for k := range src.m {
 if _, ok := m[k]; !ok {
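`mergeState` is where per-worker results meet: the renamed processor keeps its unique values in a `map[string]struct{}`, and merging is a plain set union that only inserts keys the destination does not hold yet. A simplified, self-contained sketch of that union (the byte-size accounting shown here is an assumption for illustration, not the actual method):

```go
package main

import "fmt"

// mergeSets inserts every key from src that dst does not already hold,
// returning a rough byte estimate of the state growth.
func mergeSets(dst, src map[string]struct{}) int {
	added := 0
	for k := range src {
		if _, ok := dst[k]; !ok {
			dst[k] = struct{}{}
			added += len(k)
		}
	}
	return added
}

func main() {
	a := map[string]struct{}{"x": {}, "y": {}}
	b := map[string]struct{}{"y": {}, "z": {}}
	mergeSets(a, b)
	fmt.Println(len(a)) // 3
}
```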
@@ -180,7 +180,7 @@ func (sup *statsUniqArrayProcessor) mergeState(sfp statsProcessor) {
 }
 }

-func (sup *statsUniqArrayProcessor) finalizeStats() string {
+func (sup *statsUniqValuesProcessor) finalizeStats() string {
 if len(sup.m) == 0 {
 return "[]"
 }
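`finalizeStats` short-circuits to `"[]"` when nothing was collected; otherwise it has to emit the sorted JSON array promised by the docs above. A sketch of that output step over the same `map[string]struct{}` state (illustrative only; the quoting and sorting details of the real code may differ):

```go
package main

import (
	"fmt"
	"sort"
	"strconv"
)

// finalizeSet renders a set of collected values as a sorted JSON array.
func finalizeSet(m map[string]struct{}) string {
	if len(m) == 0 {
		return "[]" // nothing collected for this group
	}
	items := make([]string, 0, len(m))
	for k := range m {
		items = append(items, k)
	}
	sort.Strings(items)

	b := []byte{'['}
	for i, item := range items {
		if i > 0 {
			b = append(b, ',')
		}
		// Go-style quoting; adequate for plain ASCII values in this sketch
		b = strconv.AppendQuote(b, item)
	}
	b = append(b, ']')
	return string(b)
}

func main() {
	fmt.Println(finalizeSet(map[string]struct{}{"b": {}, "a": {}})) // ["a","b"]
}
```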
@@ -214,12 +214,12 @@ func (sup *statsUniqArrayProcessor) finalizeStats() string {
 return bytesutil.ToUnsafeString(b)
 }

-func parseStatsUniqArray(lex *lexer) (*statsUniqArray, error) {
-fields, err := parseFieldNamesForStatsFunc(lex, "uniq_array")
+func parseStatsUniqValues(lex *lexer) (*statsUniqValues, error) {
+fields, err := parseFieldNamesForStatsFunc(lex, "uniq_values")
 if err != nil {
 return nil, err
 }
-su := &statsUniqArray{
+su := &statsUniqValues{
 fields: fields,
 containsStar: slices.Contains(fields, "*"),
 }