wip

parent 07d244dab0
commit c2050495c4

4 changed files with 17 additions and 8 deletions
@@ -1720,10 +1720,10 @@ _time:5m | uniq by (host, path)
 
 The unique entries are returned in arbitrary order. Use [`sort` pipe](#sort-pipe) in order to sort them if needed.
 
-Add `hits` after `uniq by (...)` in order to return the number of matching logs per each field value:
+Add `with hits` after `uniq by (...)` in order to return the number of matching logs per each field value:
 
 ```logsql
-_time:5m | uniq by (host) hits
+_time:5m | uniq by (host) with hits
 ```
 
 Unique entries are stored in memory during query execution. Big number of unique selected entries may require a lot of memory.
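For illustration only (not part of this diff), the `hits` column produced by `with hits` can be combined with the `sort` pipe mentioned in the docs above; the `host` field name is just an example:

```logsql
_time:5m | uniq by (host) with hits | sort by (hits) desc
```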
@@ -32,7 +32,7 @@ func (pu *pipeUniq) String() string {
 		s += " by (" + fieldNamesString(pu.byFields) + ")"
 	}
 	if pu.hitsFieldName != "" {
-		s += " hits"
+		s += " with hits"
 	}
 	if pu.limit > 0 {
 		s += fmt.Sprintf(" limit %d", pu.limit)
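A minimal standalone sketch (not the actual pipeUniq type, and the `by (...)` joining is an assumption) of how the pipe text is assembled after this change, with the hits flag now rendered as ` with hits`:

```go
package main

import "fmt"

// uniqString mimics the String() concatenation order shown in the hunk above:
// optional "by (...)" clause, then " with hits", then " limit N".
func uniqString(byFields []string, withHits bool, limit int) string {
	s := "uniq"
	if len(byFields) > 0 {
		s += " by (" + byFields[0]
		for _, f := range byFields[1:] {
			s += ", " + f
		}
		s += ")"
	}
	if withHits {
		s += " with hits"
	}
	if limit > 0 {
		s += fmt.Sprintf(" limit %d", limit)
	}
	return s
}

func main() {
	fmt.Println(uniqString([]string{"host", "path"}, true, 10))
	// uniq by (host, path) with hits limit 10
}
```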
@@ -477,6 +477,12 @@ func parsePipeUniq(lex *lexer) (*pipeUniq, error) {
 		pu.byFields = bfs
 	}
 
+	if lex.isKeyword("with") {
+		lex.nextToken()
+		if !lex.isKeyword("hits") {
+			return nil, fmt.Errorf("missing 'hits' after 'with'")
+		}
+	}
 	if lex.isKeyword("hits") {
 		lex.nextToken()
 		hitsFieldName := "hits"
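A self-contained sketch of the optional-keyword pattern added above, using a plain token slice instead of the real lexer; as in the hunk, `with` must be followed by `hits`, and both `hits` and `with hits` end up requesting a hits column:

```go
package main

import (
	"fmt"
	"strings"
)

// parseUniqSuffix is a simplified stand-in for the lexer-driven logic:
// consume an optional "with", which must be followed by "hits",
// then let the existing "hits" branch pick it up.
func parseUniqSuffix(tokens []string) (withHits bool, err error) {
	i := 0
	if i < len(tokens) && strings.EqualFold(tokens[i], "with") {
		i++
		if i >= len(tokens) || !strings.EqualFold(tokens[i], "hits") {
			return false, fmt.Errorf("missing 'hits' after 'with'")
		}
	}
	if i < len(tokens) && strings.EqualFold(tokens[i], "hits") {
		return true, nil
	}
	return false, nil
}

func main() {
	for _, q := range [][]string{{"hits"}, {"with", "hits"}, {"with"}, {}} {
		withHits, err := parseUniqSuffix(q)
		fmt.Printf("%v -> withHits=%v err=%v\n", q, withHits, err)
	}
}
```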
@@ -11,15 +11,15 @@ func TestParsePipeUniqSuccess(t *testing.T) {
 	}
 
 	f(`uniq`)
-	f(`uniq hits`)
+	f(`uniq with hits`)
 	f(`uniq limit 10`)
-	f(`uniq hits limit 10`)
+	f(`uniq with hits limit 10`)
 	f(`uniq by (x)`)
 	f(`uniq by (x) limit 10`)
 	f(`uniq by (x, y)`)
-	f(`uniq by (x, y) hits`)
+	f(`uniq by (x, y) with hits`)
 	f(`uniq by (x, y) limit 10`)
-	f(`uniq by (x, y) hits limit 10`)
+	f(`uniq by (x, y) with hits limit 10`)
 }
 
 func TestParsePipeUniqFailure(t *testing.T) {
@@ -33,6 +33,7 @@ func TestParsePipeUniqFailure(t *testing.T) {
 	f(`uniq by hits`)
 	f(`uniq by(x) limit`)
 	f(`uniq by(x) limit foo`)
+	f(`uniq by (x) with`)
 }
 
 func TestPipeUniq(t *testing.T) {
@@ -365,10 +366,12 @@ func TestPipeUniqUpdateNeededFields(t *testing.T) {
 	f("uniq by()", "*", "", "*", "")
 	f("uniq by(*)", "*", "", "*", "")
 	f("uniq by(f1,f2)", "*", "", "f1,f2", "")
+	f("uniq by(f1,f2) with hits", "*", "", "f1,f2", "")
 
 	// all the needed fields, unneeded fields do not intersect with src
 	f("uniq by(s1, s2)", "*", "f1,f2", "s1,s2", "")
 	f("uniq", "*", "f1,f2", "*", "")
+	f("uniq with hits", "*", "f1,f2", "*", "")
 
 	// all the needed fields, unneeded fields intersect with src
 	f("uniq by(s1, s2)", "*", "s1,f1,f2", "s1,s2", "")
@@ -229,7 +229,7 @@ func (s *Storage) getFieldValuesNoHits(ctx context.Context, tenantIDs []TenantID
 func (s *Storage) GetFieldValues(ctx context.Context, tenantIDs []TenantID, q *Query, fieldName string, limit uint64) ([]ValueWithHits, error) {
 	pipes := append([]pipe{}, q.pipes...)
 	quotedFieldName := quoteTokenIfNeeded(fieldName)
-	pipeStr := fmt.Sprintf("uniq by (%s) hits limit %d", quotedFieldName, limit)
+	pipeStr := fmt.Sprintf("uniq by (%s) with hits limit %d", quotedFieldName, limit)
 	lex := newLexer(pipeStr)
 
 	pu, err := parsePipeUniq(lex)
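For reference, a tiny example (with a hypothetical field name and limit) of the pipe string GetFieldValues now builds and hands to parsePipeUniq:

```go
package main

import "fmt"

func main() {
	// Hypothetical inputs; the real values come from the GetFieldValues arguments.
	quotedFieldName := "host"
	limit := uint64(10)
	pipeStr := fmt.Sprintf("uniq by (%s) with hits limit %d", quotedFieldName, limit)
	fmt.Println(pipeStr) // uniq by (host) with hits limit 10
}
```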