Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git, synced 2024-12-31 15:06:26 +00:00

wip

This commit is contained in:
parent eac0722068
commit c5734e18b9

9 changed files with 164 additions and 34 deletions
@@ -3,7 +3,6 @@ package logstorage
 import (
 	"math"
 	"slices"
 	"strings"
 	"sync/atomic"
 	"time"
 	"unsafe"
@@ -201,19 +200,6 @@ func (br *blockResult) sizeBytes() int {
 	return n
 }
 
-// addResultColumns adds the given rcs to br.
-//
-// The br is valid only until rcs are modified.
-func (br *blockResult) addResultColumns(rcs []resultColumn) {
-	if len(rcs) == 0 || len(rcs[0].values) == 0 {
-		return
-	}
-
-	for i := range rcs {
-		br.addResultColumn(&rcs[i])
-	}
-}
-
 // setResultColumns sets the given rcs as br columns.
 //
 // The br is valid only until rcs are modified.
@@ -1275,14 +1261,6 @@ func (br *blockResult) renameSingleColumn(srcName, dstName string) {
 	br.csInitialized = false
 }
 
-func debugColumnNames(cs []*blockResultColumn) string {
-	a := make([]string, len(cs))
-	for i, c := range cs {
-		a[i] = c.name
-	}
-	return strings.Join(a, ",")
-}
-
 // deleteColumns deletes columns with the given columnNames.
 func (br *blockResult) deleteColumns(columnNames []string) {
 	if len(columnNames) == 0 {
@@ -1811,6 +1789,11 @@ type resultColumn struct {
 	values []string
 }
 
+func (rc *resultColumn) reset() {
+	rc.name = ""
+	rc.resetValues()
+}
+
 func (rc *resultColumn) resetValues() {
 	clear(rc.values)
 	rc.values = rc.values[:0]
@@ -1819,8 +1802,8 @@ func (rc *resultColumn) resetValues() {
 func appendResultColumnWithName(dst []resultColumn, name string) []resultColumn {
 	dst = slicesutil.SetLength(dst, len(dst)+1)
 	rc := &dst[len(dst)-1]
 	rc.resetValues()
 	rc.name = name
 	rc.resetValues()
 	return dst
 }
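Note: the appendResultColumnWithName hunk above reuses a slice element obtained via slicesutil.SetLength and resets it before use. A minimal sketch of that grow-and-reset reuse pattern, with hypothetical item/setLength stand-ins for resultColumn and slicesutil.SetLength:

package main

import "fmt"

// item is a stand-in for a reusable element such as resultColumn.
type item struct {
    name   string
    values []string
}

// resetValues clears the values while keeping the allocated capacity for reuse.
func (it *item) resetValues() {
    clear(it.values)
    it.values = it.values[:0]
}

// setLength grows dst to n elements, reusing the existing backing array when
// it already has enough capacity (simplified stand-in for slicesutil.SetLength).
func setLength(dst []item, n int) []item {
    if n <= cap(dst) {
        return dst[:n]
    }
    out := make([]item, n)
    copy(out, dst)
    return out
}

// appendItemWithName mirrors the pattern in appendResultColumnWithName:
// grow by one, then reset the (possibly stale) reused element before use.
func appendItemWithName(dst []item, name string) []item {
    dst = setLength(dst, len(dst)+1)
    it := &dst[len(dst)-1]
    it.resetValues()
    it.name = name
    return dst
}

func main() {
    var items []item
    items = appendItemWithName(items, "foo")
    items[0].values = append(items[0].values, "v1")
    items = items[:0]                        // logical truncation keeps capacity
    items = appendItemWithName(items, "bar") // reuses the old element
    fmt.Println(items[0].name, len(items[0].values)) // bar 0
}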
@@ -60,7 +60,9 @@ func TestPipeDelete(t *testing.T) {
 			{"_msg", `{"foo":"bar"}`},
 			{"a", `test`},
 		},
-	}, [][]Field{})
+	}, [][]Field{
+		{},
+	})
 
 	// delete non-existing fields
 	f("delete foo, _msg, bar", [][]Field{
@@ -93,6 +95,8 @@ func TestPipeDelete(t *testing.T) {
 			{"b", "df"},
 		},
 	}, [][]Field{
+		{},
+		{},
 		{
 			{"b", `baz`},
 			{"c", "d"},
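Note: the updated expectations keep one empty row per input row whose fields were all deleted, instead of an empty result set. A small illustrative sketch of that behavior on plain slices (hypothetical deleteFields helper, not the pipe implementation):

package main

import "fmt"

// Field is a simplified name/value pair, mirroring the test fixtures above.
type Field struct {
    Name, Value string
}

// deleteFields removes the named fields from every row but keeps the row
// itself, so a fully-emptied row survives as an empty slice.
func deleteFields(rows [][]Field, names ...string) [][]Field {
    drop := make(map[string]bool, len(names))
    for _, n := range names {
        drop[n] = true
    }
    out := make([][]Field, 0, len(rows))
    for _, row := range rows {
        kept := []Field{}
        for _, f := range row {
            if !drop[f.Name] {
                kept = append(kept, f)
            }
        }
        out = append(out, kept) // appended even when kept is empty
    }
    return out
}

func main() {
    rows := [][]Field{{{"_msg", `{"foo":"bar"}`}, {"a", "test"}}}
    fmt.Println(deleteFields(rows, "_msg", "a")) // [[]] - one empty row, not zero rows
}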
@@ -68,10 +68,9 @@ func (pe *pipeExtract) updateNeededFields(neededFields, unneededFields fieldsSet
 func (pe *pipeExtract) newPipeProcessor(workersCount int, _ <-chan struct{}, _ func(), ppBase pipeProcessor) pipeProcessor {
 	shards := make([]pipeExtractProcessorShard, workersCount)
 	for i := range shards {
-		ef := newPattern(pe.steps)
 		shards[i] = pipeExtractProcessorShard{
 			pipeExtractProcessorShardNopad: pipeExtractProcessorShardNopad{
-				ef: ef,
+				ef: newPattern(pe.steps),
 			},
 		}
 	}
@@ -516,7 +516,7 @@ func (wctx *pipeSortWriteContext) writeNextRow(shard *pipeSortProcessorShard) {
 		}
 	}
 	if !areEqualColumns {
-		// send the current block to bbBase and construct a block with new set of columns
+		// send the current block to ppBase and construct a block with new set of columns
 		wctx.flush()
 
 		rcs = wctx.rcs[:0]
@@ -154,6 +154,28 @@ func TestPipeSort(t *testing.T) {
 		},
 	})
 
+	// Sort by multiple fields with limit desc
+	f("sort by (a, b) desc limit 1", [][]Field{
+		{
+			{"_msg", `abc`},
+			{"a", `2`},
+			{"b", `3`},
+		},
+		{
+			{"_msg", `def`},
+			{"a", `1`},
+		},
+		{
+			{"a", `2`},
+			{"b", `54`},
+		},
+	}, [][]Field{
+		{
+			{"a", `2`},
+			{"b", `54`},
+		},
+	})
+
 	// Sort by multiple fields with offset
 	f("sort by (a, b) offset 1", [][]Field{
 		{
@@ -203,6 +225,28 @@ func TestPipeSort(t *testing.T) {
 			{"b", `3`},
 		},
 	})
+
+	// Sort by multiple fields with offset and limit
+	f("sort by (a, b) desc offset 2 limit 100", [][]Field{
+		{
+			{"_msg", `abc`},
+			{"a", `2`},
+			{"b", `3`},
+		},
+		{
+			{"_msg", `def`},
+			{"a", `1`},
+		},
+		{
+			{"a", `2`},
+			{"b", `54`},
+		},
+	}, [][]Field{
+		{
+			{"_msg", `def`},
+			{"a", `1`},
+		},
+	})
 }
 
 func TestPipeSortUpdateNeededFields(t *testing.T) {
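Note: the new cases exercise `sort by (a, b) desc` combined with limit and offset. An illustrative sketch of multi-key descending comparison plus limit/offset slicing, using sort.Slice and a simplified row type (not the pipe's actual comparator, which also handles typed values and missing fields):

package main

import (
    "fmt"
    "sort"
)

// row is a simplified record keyed by the two sort fields used in the tests.
type row struct {
    a, b string
}

func main() {
    rows := []row{
        {a: "2", b: "3"},
        {a: "1", b: ""},
        {a: "2", b: "54"},
    }

    // Descending order by (a, b): compare a first, then b on ties.
    sort.Slice(rows, func(i, j int) bool {
        if rows[i].a != rows[j].a {
            return rows[i].a > rows[j].a
        }
        return rows[i].b > rows[j].b
    })

    // With limit 1, only the first row survives; with offset 2 limit 100,
    // everything after the first two rows survives.
    fmt.Println(rows[:1]) // [{2 54}]
    fmt.Println(rows[2:]) // [{1 }]
}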
@@ -4,6 +4,85 @@ import (
 	"testing"
 )
 
+func TestParsePipeStatsSuccess(t *testing.T) {
+	f := func(pipeStr string) {
+		t.Helper()
+		expectParsePipeSuccess(t, pipeStr)
+	}
+
+	f(`stats count(*) as rows`)
+	f(`stats by (x) count(*) as rows, count_uniq(x) as uniqs`)
+	f(`stats by (_time:month offset 6.5h, y) count(*) as rows, count_uniq(x) as uniqs`)
+	f(`stats by (_time:month offset 6.5h, y) count(*) if (q:w) as rows, count_uniq(x) as uniqs`)
+}
+
+func TestParsePipeStatsFailure(t *testing.T) {
+	f := func(pipeStr string) {
+		t.Helper()
+		expectParsePipeFailure(t, pipeStr)
+	}
+
+	f(`stats`)
+	f(`stats by`)
+	f(`stats foo`)
+	f(`stats count`)
+	f(`stats if (x:y)`)
+	f(`stats by(x) foo`)
+	f(`stats by(x:abc) count() rows`)
+	f(`stats by(x:1h offset) count () rows`)
+	f(`stats by(x:1h offset foo) count() rows`)
+}
+
+func TestPipeStats(t *testing.T) {
+	f := func(pipeStr string, rows, rowsExpected [][]Field) {
+		t.Helper()
+		expectPipeResults(t, pipeStr, rows, rowsExpected)
+	}
+
+	f("stats count(*) as rows", [][]Field{
+		{
+			{"_msg", `abc`},
+			{"a", `2`},
+			{"b", `3`},
+		},
+		{
+			{"_msg", `def`},
+			{"a", `1`},
+		},
+		{
+			{"a", `2`},
+			{"b", `54`},
+		},
+	}, [][]Field{
+		{
+			{"rows", "3"},
+		},
+	})
+
+	f("stats by (a) count(*) as rows", [][]Field{
+		{
+			{"_msg", `abc`},
+			{"a", `2`},
+			{"b", `3`},
+		},
+		{
+			{"_msg", `def`},
+			{"a", `1`},
+		},
+		{
+			{"a", `2`},
+			{"b", `54`},
+		},
+	}, [][]Field{
+		{
+			{"a", "1"},
+			{"rows", "1"},
+			{"a", "2"},
+			{"rows", "2"},
+		},
+	})
+}
+
 func TestPipeStatsUpdateNeededFields(t *testing.T) {
 	f := func(s, neededFields, unneededFields, neededFieldsExpected, unneededFieldsExpected string) {
 		t.Helper()
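Note: TestParsePipeStatsSuccess and TestParsePipeStatsFailure follow a table-driven helper pattern built on expectParsePipeSuccess/expectParsePipeFailure, whose implementations are not shown in this diff. A generic sketch of that pattern with a hypothetical parseQuery stand-in:

package main

import (
    "errors"
    "strings"
    "testing"
)

// parseQuery is a hypothetical stand-in for the real pipe parser: it only
// checks that a stats pipe names at least one aggregation.
func parseQuery(s string) error {
    if !strings.HasPrefix(s, "stats ") || !strings.Contains(s, "(") {
        return errors.New("missing aggregation")
    }
    return nil
}

func expectParseSuccess(t *testing.T, s string) {
    t.Helper()
    if err := parseQuery(s); err != nil {
        t.Fatalf("unexpected error parsing %q: %s", s, err)
    }
}

func expectParseFailure(t *testing.T, s string) {
    t.Helper()
    if err := parseQuery(s); err == nil {
        t.Fatalf("expected error when parsing %q", s)
    }
}

func TestParseSketch(t *testing.T) {
    expectParseSuccess(t, `stats count(*) as rows`)
    expectParseFailure(t, `stats`)
}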
@@ -457,7 +457,7 @@ func (wctx *pipeTopkWriteContext) writeNextRow(shard *pipeTopkProcessorShard) bo
 		}
 	}
 	if !areEqualColumns {
-		// send the current block to bbBase and construct a block with new set of columns
+		// send the current block to ppBase and construct a block with new set of columns
 		wctx.flush()
 
 		rcs = wctx.rcs[:0]
@@ -354,7 +354,7 @@ func (wctx *pipeUniqWriteContext) writeRow(rowFields []Field) {
 		}
 	}
 	if !areEqualColumns {
-		// send the current block to bbBase and construct a block with new set of columns
+		// send the current block to ppBase and construct a block with new set of columns
 		wctx.flush()
 
 		rcs = wctx.rcs[:0]
@@ -22,10 +22,15 @@ func (uctx *fieldsUnpackerContext) resetFields() {
 }
 
 func (uctx *fieldsUnpackerContext) addField(name, value, fieldPrefix string) {
+	nameCopy := ""
+	if fieldPrefix != "" {
 		nameBuf := uctx.a.newBytes(len(fieldPrefix) + len(name))
 		copy(nameBuf, fieldPrefix)
 		copy(nameBuf[len(fieldPrefix):], name)
-		nameCopy := bytesutil.ToUnsafeString(nameBuf)
+		nameCopy = bytesutil.ToUnsafeString(nameBuf)
+	} else {
+		nameCopy = uctx.a.copyString(name)
+	}
 
 	valueCopy := uctx.a.copyString(value)
 
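Note the `:=` versus `=` on the two bytesutil.ToUnsafeString lines above: with nameCopy declared before the if, the branch has to assign with `=`; a `:=` inside the block would declare a new, shadowed variable and leave the outer nameCopy empty. A minimal sketch of that pitfall in plain Go (hypothetical buildName helper, no arena allocator involved):

package main

import "fmt"

func buildName(name, prefix string) string {
    nameCopy := ""
    if prefix != "" {
        // Correct: plain assignment updates the outer nameCopy.
        nameCopy = prefix + name
        // Buggy variant (do not do this): `nameCopy := prefix + name`
        // would shadow the outer variable, which would stay "".
    } else {
        nameCopy = name
    }
    return nameCopy
}

func main() {
    fmt.Println(buildName("ip", "host.")) // host.ip
    fmt.Println(buildName("ip", ""))      // ip
}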
@@ -115,7 +120,23 @@ type pipeUnpackWriteContext struct {
 	valuesLen int
 }
 
+func (wctx *pipeUnpackWriteContext) reset() {
+	wctx.brSrc = nil
+	wctx.csSrc = nil
+	wctx.ppBase = nil
+
+	rcs := wctx.rcs
+	for i := range rcs {
+		rcs[i].reset()
+	}
+	wctx.rcs = rcs[:0]
+
+	wctx.valuesLen = 0
+}
+
 func (wctx *pipeUnpackWriteContext) init(brSrc *blockResult, ppBase pipeProcessor) {
+	wctx.reset()
+
 	wctx.brSrc = brSrc
 	wctx.csSrc = brSrc.getColumns()
 	wctx.ppBase = ppBase
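Note: the new reset method clears reference fields and truncates rcs before each init, so a reused context does not retain data from the previous block. A small sketch of that reset-then-init lifecycle with a hypothetical writeContext type:

package main

import "fmt"

// writeContext is a hypothetical reusable context mirroring the reset/init
// lifecycle shown above.
type writeContext struct {
    src  *string  // stand-in for brSrc/ppBase style references
    cols []string // stand-in for rcs; capacity is kept across uses
}

func (wctx *writeContext) reset() {
    wctx.src = nil            // drop references so they can be garbage-collected
    wctx.cols = wctx.cols[:0] // keep capacity, drop contents
}

func (wctx *writeContext) init(src *string, cols ...string) {
    wctx.reset() // always start from a clean state, as in the diff
    wctx.src = src
    wctx.cols = append(wctx.cols, cols...)
}

func main() {
    s1, s2 := "block-1", "block-2"
    var wctx writeContext
    wctx.init(&s1, "a", "b")
    fmt.Println(*wctx.src, wctx.cols) // block-1 [a b]
    wctx.init(&s2, "c")
    fmt.Println(*wctx.src, wctx.cols) // block-2 [c]
}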
@@ -135,7 +156,7 @@ func (wctx *pipeUnpackWriteContext) writeRow(rowIdx int, extraFields []Field) {
 		}
 	}
 	if !areEqualColumns {
-		// send the current block to bbBase and construct a block with new set of columns
+		// send the current block to ppBase and construct a block with new set of columns
 		wctx.flush()
 
 		rcs = wctx.rcs[:0]