VictoriaMetrics/lib/logstorage/pipe_extract.go

package logstorage

import (
	"fmt"
	"unsafe"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/slicesutil"
)

// pipeExtract processes '| extract ...' pipe.
//
// See https://docs.victoriametrics.com/victorialogs/logsql/#extract-pipe
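//
// A minimal usage sketch (the pattern and field names below are illustrative, not taken from this file):
//
//	_time:5m | extract "ip=<ip> " from _msg
//
// This would copy the text between "ip=" and the following space in the _msg field
// into the "ip" result field for logs seen during the last 5 minutes.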
type pipeExtract struct {
	fromField string

	ptn *pattern

	patternStr string

	keepOriginalFields bool
	skipEmptyResults   bool

	// iff is an optional filter for skipping the extract func
	iff *ifFilter
}
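
// String returns LogsQL representation of pe.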
func (pe *pipeExtract) String() string {
	s := "extract"
	if pe.iff != nil {
		s += " " + pe.iff.String()
	}
	s += " " + quoteTokenIfNeeded(pe.patternStr)
	if !isMsgFieldName(pe.fromField) {
		s += " from " + quoteTokenIfNeeded(pe.fromField)
	}
	if pe.keepOriginalFields {
		s += " keep_original_fields"
	}
	if pe.skipEmptyResults {
		s += " skip_empty_results"
	}
	return s
}
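
// canLiveTail reports whether '| extract ...' can be used in live tailing.
// It returns true, since the pipe doesn't buffer rows across blocks.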
func (pe *pipeExtract) canLiveTail() bool {
	return true
}

func (pe *pipeExtract) optimize() {
	pe.iff.optimizeFilterIn()
}

func (pe *pipeExtract) hasFilterInWithQuery() bool {
	return pe.iff.hasFilterInWithQuery()
}

func (pe *pipeExtract) initFilterInValues(cache map[string][]string, getFieldValuesFunc getFieldValuesFunc) (pipe, error) {
	iffNew, err := pe.iff.initFilterInValues(cache, getFieldValuesFunc)
	if err != nil {
		return nil, err
	}
	peNew := *pe
	peNew.iff = iffNew
	return &peNew, nil
}
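
// updateNeededFields adjusts neededFields and unneededFields for the fields read by the pipe
// (pe.fromField and the fields used by the optional if (...) filter) and the fields it produces
// (the fields captured by the pattern).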
func (pe *pipeExtract) updateNeededFields(neededFields, unneededFields fieldsSet) {
	if neededFields.isEmpty() {
		if pe.iff != nil {
			neededFields.addFields(pe.iff.neededFields)
		}
		return
	}

	if neededFields.contains("*") {
		unneededFieldsOrig := unneededFields.clone()
		needFromField := false
		for _, step := range pe.ptn.steps {
			if step.field == "" {
				continue
			}
			if !unneededFieldsOrig.contains(step.field) {
				needFromField = true
			}
			if !pe.keepOriginalFields && !pe.skipEmptyResults {
				unneededFields.add(step.field)
			}
		}
		if needFromField {
			unneededFields.remove(pe.fromField)
			if pe.iff != nil {
				unneededFields.removeFields(pe.iff.neededFields)
			}
		} else {
			unneededFields.add(pe.fromField)
		}
	} else {
		neededFieldsOrig := neededFields.clone()
		needFromField := false
		for _, step := range pe.ptn.steps {
			if step.field == "" {
				continue
			}
			if neededFieldsOrig.contains(step.field) {
				needFromField = true
				if !pe.keepOriginalFields && !pe.skipEmptyResults {
					neededFields.remove(step.field)
				}
			}
		}
		if needFromField {
			neededFields.add(pe.fromField)
			if pe.iff != nil {
				neededFields.addFields(pe.iff.neededFields)
			}
		}
	}
}
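
// newPipeProcessor returns a pipeProcessor for '| extract ...' pipe with a separate shard per worker.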
func (pe *pipeExtract) newPipeProcessor(workersCount int, _ <-chan struct{}, _ func(), ppNext pipeProcessor) pipeProcessor {
	return &pipeExtractProcessor{
		pe:     pe,
		ppNext: ppNext,
		shards: make([]pipeExtractProcessorShard, workersCount),
	}
}
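
// pipeExtractProcessor executes '| extract ...' pipe.
// Every worker writes only to its own shard, so no synchronization is needed.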
type pipeExtractProcessor struct {
	pe     *pipeExtract
	ppNext pipeProcessor

	shards []pipeExtractProcessorShard
}

type pipeExtractProcessorShard struct {
	pipeExtractProcessorShardNopad

	// The padding prevents false sharing on widespread platforms with 128 mod (cache line size) = 0 .
	_ [128 - unsafe.Sizeof(pipeExtractProcessorShardNopad{})%128]byte
}

type pipeExtractProcessorShardNopad struct {
	bm bitmap

	ptn *pattern

	resultColumns []*blockResultColumn
	resultValues  []string

	rcs []resultColumn

	a arena
}
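
// writeBlock applies the optional if (...) filter to br, runs the pattern over pe.fromField values
// of the matching rows and appends the extracted fields to br as result columns
// before passing the block to the next pipe.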
func (pep *pipeExtractProcessor) writeBlock(workerID uint, br *blockResult) {
	if len(br.timestamps) == 0 {
		return
	}

	pe := pep.pe
	shard := &pep.shards[workerID]

	bm := &shard.bm
	bm.init(len(br.timestamps))
	bm.setBits()
	if iff := pe.iff; iff != nil {
		iff.f.applyToBlockResult(br, bm)
		if bm.isZero() {
			pep.ppNext.writeBlock(workerID, br)
			return
		}
	}

	if shard.ptn == nil {
		shard.ptn = pe.ptn.clone()
	}
	ptn := shard.ptn

	shard.rcs = slicesutil.SetLength(shard.rcs, len(ptn.fields))
	rcs := shard.rcs
	for i := range ptn.fields {
		rcs[i].name = ptn.fields[i].name
	}

	c := br.getColumnByName(pe.fromField)
	values := c.getValues(br)

	shard.resultColumns = slicesutil.SetLength(shard.resultColumns, len(rcs))
	resultColumns := shard.resultColumns
	for i := range resultColumns {
		resultColumns[i] = br.getColumnByName(rcs[i].name)
	}

	shard.resultValues = slicesutil.SetLength(shard.resultValues, len(rcs))
	resultValues := shard.resultValues

	hadUpdates := false
	vPrev := ""
	for rowIdx, v := range values {
		if bm.isSetBit(rowIdx) {
			if !hadUpdates || vPrev != v {
				vPrev = v
				hadUpdates = true

				ptn.apply(v)

				for i, f := range ptn.fields {
					v := *f.value
					if v == "" && pe.skipEmptyResults || pe.keepOriginalFields {
						c := resultColumns[i]
						if vOrig := c.getValueAtRow(br, rowIdx); vOrig != "" {
							v = vOrig
						}
					} else {
						v = shard.a.copyString(v)
					}
					resultValues[i] = v
				}
			}
		} else {
			for i, c := range resultColumns {
				resultValues[i] = c.getValueAtRow(br, rowIdx)
			}
		}

		for i, v := range resultValues {
			rcs[i].addValue(v)
		}
	}

	for i := range rcs {
		br.addResultColumn(&rcs[i])
	}
	pep.ppNext.writeBlock(workerID, br)

	for i := range rcs {
		rcs[i].reset()
	}
	shard.a.reset()
}
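
// flush is a no-op, since pipeExtractProcessor doesn't buffer data between writeBlock calls.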
func (pep *pipeExtractProcessor) flush() error {
	return nil
}
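
// parsePipeExtract parses '| extract ...' pipe, including the optional if (...) filter,
// the 'from ...' field and the 'keep_original_fields' / 'skip_empty_results' flags.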
func parsePipeExtract(lex *lexer) (*pipeExtract, error) {
	if !lex.isKeyword("extract") {
		return nil, fmt.Errorf("unexpected token: %q; want %q", lex.token, "extract")
	}
	lex.nextToken()

	// parse optional if (...)
	var iff *ifFilter
	if lex.isKeyword("if") {
		f, err := parseIfFilter(lex)
		if err != nil {
			return nil, err
		}
		iff = f
	}

	// parse pattern
	patternStr, err := getCompoundToken(lex)
	if err != nil {
		return nil, fmt.Errorf("cannot read 'pattern': %w", err)
	}
	ptn, err := parsePattern(patternStr)
	if err != nil {
		return nil, fmt.Errorf("cannot parse 'pattern' %q: %w", patternStr, err)
	}

	// parse optional 'from ...' part
	fromField := "_msg"
	if lex.isKeyword("from") {
		lex.nextToken()
		f, err := parseFieldName(lex)
		if err != nil {
			return nil, fmt.Errorf("cannot parse 'from' field name: %w", err)
		}
		fromField = f
	}

	keepOriginalFields := false
	skipEmptyResults := false
	switch {
	case lex.isKeyword("keep_original_fields"):
		lex.nextToken()
		keepOriginalFields = true
	case lex.isKeyword("skip_empty_results"):
		lex.nextToken()
		skipEmptyResults = true
	}

	pe := &pipeExtract{
		fromField:          fromField,
		ptn:                ptn,
		patternStr:         patternStr,
		keepOriginalFields: keepOriginalFields,
		skipEmptyResults:   skipEmptyResults,
		iff:                iff,
	}

	return pe, nil
}