2024-05-20 02:08:30 +00:00
|
|
|
package logstorage
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
2024-05-22 19:01:20 +00:00
|
|
|
"slices"
|
2024-05-20 02:08:30 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// pipeUnpackLogfmt processes '| unpack_logfmt ...' pipe.
//
// See https://docs.victoriametrics.com/victorialogs/logsql/#unpack_logfmt-pipe
type pipeUnpackLogfmt struct {
	// fromField is the field to unpack logfmt fields from
	fromField string

	// fields is an optional list of fields to extract from logfmt.
	//
	// if it is empty, then all the fields are extracted.
	fields []string

	// resultPrefix is prefix to add to unpacked field names
	resultPrefix string

	// keepOriginalFields enables the 'keep_original_fields' option;
	// it is passed through to newPipeUnpackProcessor and updateNeededFieldsForUnpackPipe.
	keepOriginalFields bool

	// skipEmptyResults enables the 'skip_empty_results' option;
	// it is passed through to newPipeUnpackProcessor and updateNeededFieldsForUnpackPipe.
	skipEmptyResults bool

	// iff is an optional filter for skipping unpacking logfmt
	iff *ifFilter
}
|
|
|
|
|
|
|
|
func (pu *pipeUnpackLogfmt) String() string {
|
|
|
|
s := "unpack_logfmt"
|
2024-05-22 19:01:20 +00:00
|
|
|
if pu.iff != nil {
|
|
|
|
s += " " + pu.iff.String()
|
|
|
|
}
|
2024-05-20 02:08:30 +00:00
|
|
|
if !isMsgFieldName(pu.fromField) {
|
|
|
|
s += " from " + quoteTokenIfNeeded(pu.fromField)
|
|
|
|
}
|
2024-05-22 19:01:20 +00:00
|
|
|
if len(pu.fields) > 0 {
|
|
|
|
s += " fields (" + fieldsToString(pu.fields) + ")"
|
|
|
|
}
|
2024-05-20 02:08:30 +00:00
|
|
|
if pu.resultPrefix != "" {
|
|
|
|
s += " result_prefix " + quoteTokenIfNeeded(pu.resultPrefix)
|
|
|
|
}
|
2024-05-24 22:30:58 +00:00
|
|
|
if pu.keepOriginalFields {
|
|
|
|
s += " keep_original_fields"
|
|
|
|
}
|
|
|
|
if pu.skipEmptyResults {
|
|
|
|
s += " skip_empty_results"
|
|
|
|
}
|
2024-05-20 02:08:30 +00:00
|
|
|
return s
|
|
|
|
}
|
|
|
|
|
|
|
|
// updateNeededFields propagates this pipe's field requirements into
// neededFields/unneededFields by delegating to the shared helper used by all
// unpack pipes.
func (pu *pipeUnpackLogfmt) updateNeededFields(neededFields, unneededFields fieldsSet) {
	updateNeededFieldsForUnpackPipe(pu.fromField, pu.fields, pu.keepOriginalFields, pu.skipEmptyResults, pu.iff, neededFields, unneededFields)
}
|
|
|
|
|
|
|
|
func (pu *pipeUnpackLogfmt) newPipeProcessor(workersCount int, _ <-chan struct{}, _ func(), ppBase pipeProcessor) pipeProcessor {
|
2024-05-22 19:01:20 +00:00
|
|
|
unpackLogfmt := func(uctx *fieldsUnpackerContext, s string) {
|
|
|
|
p := getLogfmtParser()
|
2024-05-20 02:08:30 +00:00
|
|
|
|
2024-05-22 19:01:20 +00:00
|
|
|
p.parse(s)
|
|
|
|
if len(pu.fields) == 0 {
|
|
|
|
for _, f := range p.fields {
|
|
|
|
uctx.addField(f.Name, f.Value)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
for _, fieldName := range pu.fields {
|
|
|
|
addedField := false
|
|
|
|
for _, f := range p.fields {
|
|
|
|
if f.Name == fieldName {
|
|
|
|
uctx.addField(f.Name, f.Value)
|
|
|
|
addedField = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !addedField {
|
|
|
|
uctx.addField(fieldName, "")
|
|
|
|
}
|
2024-05-20 02:08:30 +00:00
|
|
|
}
|
|
|
|
}
|
2024-05-22 19:01:20 +00:00
|
|
|
|
|
|
|
putLogfmtParser(p)
|
2024-05-20 02:08:30 +00:00
|
|
|
}
|
|
|
|
|
2024-05-24 22:30:58 +00:00
|
|
|
return newPipeUnpackProcessor(workersCount, unpackLogfmt, ppBase, pu.fromField, pu.resultPrefix, pu.keepOriginalFields, pu.skipEmptyResults, pu.iff)
|
2024-05-20 02:08:30 +00:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
func parsePipeUnpackLogfmt(lex *lexer) (*pipeUnpackLogfmt, error) {
|
|
|
|
if !lex.isKeyword("unpack_logfmt") {
|
|
|
|
return nil, fmt.Errorf("unexpected token: %q; want %q", lex.token, "unpack_logfmt")
|
|
|
|
}
|
|
|
|
lex.nextToken()
|
|
|
|
|
2024-05-22 19:01:20 +00:00
|
|
|
var iff *ifFilter
|
|
|
|
if lex.isKeyword("if") {
|
|
|
|
f, err := parseIfFilter(lex)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
iff = f
|
|
|
|
}
|
|
|
|
|
2024-05-20 02:08:30 +00:00
|
|
|
fromField := "_msg"
|
|
|
|
if lex.isKeyword("from") {
|
|
|
|
lex.nextToken()
|
|
|
|
f, err := parseFieldName(lex)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot parse 'from' field name: %w", err)
|
|
|
|
}
|
|
|
|
fromField = f
|
|
|
|
}
|
|
|
|
|
2024-05-22 19:01:20 +00:00
|
|
|
var fields []string
|
|
|
|
if lex.isKeyword("fields") {
|
|
|
|
lex.nextToken()
|
|
|
|
fs, err := parseFieldNamesInParens(lex)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot parse 'fields': %w", err)
|
|
|
|
}
|
|
|
|
fields = fs
|
|
|
|
if slices.Contains(fields, "*") {
|
|
|
|
fields = nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-05-20 02:08:30 +00:00
|
|
|
resultPrefix := ""
|
|
|
|
if lex.isKeyword("result_prefix") {
|
|
|
|
lex.nextToken()
|
|
|
|
p, err := getCompoundToken(lex)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot parse 'result_prefix': %w", err)
|
|
|
|
}
|
|
|
|
resultPrefix = p
|
|
|
|
}
|
|
|
|
|
2024-05-24 22:30:58 +00:00
|
|
|
keepOriginalFields := false
|
|
|
|
skipEmptyResults := false
|
|
|
|
switch {
|
|
|
|
case lex.isKeyword("keep_original_fields"):
|
|
|
|
lex.nextToken()
|
|
|
|
keepOriginalFields = true
|
|
|
|
case lex.isKeyword("skip_empty_results"):
|
|
|
|
lex.nextToken()
|
|
|
|
skipEmptyResults = true
|
|
|
|
}
|
|
|
|
|
2024-05-20 02:08:30 +00:00
|
|
|
pu := &pipeUnpackLogfmt{
|
2024-05-24 22:30:58 +00:00
|
|
|
fromField: fromField,
|
|
|
|
fields: fields,
|
|
|
|
resultPrefix: resultPrefix,
|
|
|
|
keepOriginalFields: keepOriginalFields,
|
|
|
|
skipEmptyResults: skipEmptyResults,
|
|
|
|
iff: iff,
|
2024-05-20 02:08:30 +00:00
|
|
|
}
|
|
|
|
|
2024-05-22 19:01:20 +00:00
|
|
|
return pu, nil
|
2024-05-20 02:08:30 +00:00
|
|
|
}
|