VictoriaMetrics/lib/logstorage/pipe_unpack_json.go

package logstorage

import (
	"fmt"
	"slices"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/bytesutil"
)

// pipeUnpackJSON processes '| unpack_json ...' pipe.
//
// See https://docs.victoriametrics.com/victorialogs/logsql/#unpack_json-pipe
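//
// Example LogsQL query using this pipe (illustrative sketch; the field names
// my_json, foo and bar are placeholders, not taken from this file):
//
//	_time:5m | unpack_json from my_json fields (foo, bar) result_prefix "p_"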
type pipeUnpackJSON struct {
	// fromField is the field to unpack json fields from
	fromField string

	// fields is an optional list of fields to extract from json.
	//
	// if it is empty, then all the fields are extracted.
	fields []string

	// resultPrefix is prefix to add to unpacked field names
	resultPrefix string

	// keepOriginalFields instructs preserving the original non-empty field values during unpacking
	keepOriginalFields bool

	// skipEmptyResults instructs leaving the original field values when the corresponding unpacked values are empty
	skipEmptyResults bool

	// iff is an optional filter for skipping unpacking json
	iff *ifFilter
}

func (pu *pipeUnpackJSON) String() string {
	s := "unpack_json"
	if pu.iff != nil {
		s += " " + pu.iff.String()
	}
	if !isMsgFieldName(pu.fromField) {
		s += " from " + quoteTokenIfNeeded(pu.fromField)
	}
	if len(pu.fields) > 0 {
		s += " fields (" + fieldsToString(pu.fields) + ")"
	}
	if pu.resultPrefix != "" {
		s += " result_prefix " + quoteTokenIfNeeded(pu.resultPrefix)
	}
	if pu.keepOriginalFields {
		s += " keep_original_fields"
	}
	if pu.skipEmptyResults {
		s += " skip_empty_results"
	}
	return s
}

func (pu *pipeUnpackJSON) updateNeededFields(neededFields, unneededFields fieldsSet) {
	updateNeededFieldsForUnpackPipe(pu.fromField, pu.fields, pu.keepOriginalFields, pu.skipEmptyResults, pu.iff, neededFields, unneededFields)
}

func (pu *pipeUnpackJSON) optimize() {
	pu.iff.optimizeFilterIn()
}

func (pu *pipeUnpackJSON) hasFilterInWithQuery() bool {
	return pu.iff.hasFilterInWithQuery()
}

func (pu *pipeUnpackJSON) initFilterInValues(cache map[string][]string, getFieldValuesFunc getFieldValuesFunc) (pipe, error) {
	iffNew, err := pu.iff.initFilterInValues(cache, getFieldValuesFunc)
	if err != nil {
		return nil, err
	}
	puNew := *pu
	puNew.iff = iffNew
	return &puNew, nil
}

func (pu *pipeUnpackJSON) newPipeProcessor(workersCount int, _ <-chan struct{}, _ func(), ppBase pipeProcessor) pipeProcessor {
	unpackJSON := func(uctx *fieldsUnpackerContext, s string) {
		if len(s) == 0 || s[0] != '{' {
			// This isn't a JSON object
			return
		}
		p := GetJSONParser()
		err := p.ParseLogMessage(bytesutil.ToUnsafeBytes(s))
		if err != nil {
			// The JSON cannot be parsed - add empty values for the explicitly requested fields
			for _, fieldName := range pu.fields {
				uctx.addField(fieldName, "")
			}
		} else {
			if len(pu.fields) == 0 {
				// Unpack all the fields from the parsed JSON
				for _, f := range p.Fields {
					uctx.addField(f.Name, f.Value)
				}
			} else {
				// Unpack only the requested fields; fields missing from the JSON get empty values
				for _, fieldName := range pu.fields {
					addedField := false
					for _, f := range p.Fields {
						if f.Name == fieldName {
							uctx.addField(f.Name, f.Value)
							addedField = true
							break
						}
					}
					if !addedField {
						uctx.addField(fieldName, "")
					}
				}
			}
		}
		PutJSONParser(p)
	}
	return newPipeUnpackProcessor(workersCount, unpackJSON, ppBase, pu.fromField, pu.resultPrefix, pu.keepOriginalFields, pu.skipEmptyResults, pu.iff)
}
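
// For example, given the input `{"foo":"bar","baz":"x"}`:
//   - with empty pu.fields the unpackJSON closure above adds foo="bar" and baz="x" to uctx;
//   - with pu.fields = ["foo", "qux"] it adds foo="bar" and qux="", since missing fields get empty values.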
func parsePipeUnpackJSON(lex *lexer) (*pipeUnpackJSON, error) {
	if !lex.isKeyword("unpack_json") {
		return nil, fmt.Errorf("unexpected token: %q; want %q", lex.token, "unpack_json")
	}
	lex.nextToken()

	var iff *ifFilter
	if lex.isKeyword("if") {
		f, err := parseIfFilter(lex)
		if err != nil {
			return nil, err
		}
		iff = f
	}

	fromField := "_msg"
	if lex.isKeyword("from") {
		lex.nextToken()
		f, err := parseFieldName(lex)
		if err != nil {
			return nil, fmt.Errorf("cannot parse 'from' field name: %w", err)
		}
		fromField = f
	}

	var fields []string
	if lex.isKeyword("fields") {
		lex.nextToken()
		fs, err := parseFieldNamesInParens(lex)
		if err != nil {
			return nil, fmt.Errorf("cannot parse 'fields': %w", err)
		}
		fields = fs
		if slices.Contains(fields, "*") {
			fields = nil
		}
	}

	resultPrefix := ""
	if lex.isKeyword("result_prefix") {
		lex.nextToken()
		p, err := getCompoundToken(lex)
		if err != nil {
			return nil, fmt.Errorf("cannot parse 'result_prefix': %w", err)
		}
		resultPrefix = p
	}

	keepOriginalFields := false
	skipEmptyResults := false
	switch {
	case lex.isKeyword("keep_original_fields"):
		lex.nextToken()
		keepOriginalFields = true
	case lex.isKeyword("skip_empty_results"):
		lex.nextToken()
		skipEmptyResults = true
	}

	pu := &pipeUnpackJSON{
		fromField:          fromField,
		fields:             fields,
		resultPrefix:       resultPrefix,
		keepOriginalFields: keepOriginalFields,
		skipEmptyResults:   skipEmptyResults,
		iff:                iff,
	}
	return pu, nil
}
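
// A minimal usage sketch of the parser above, assuming this package constructs its
// lexer via newLexer(s string) (the constructor name and signature are an assumption,
// not taken from this file):
//
//	lex := newLexer(`unpack_json from my_json fields (foo, bar) result_prefix "p_"`)
//	pu, err := parsePipeUnpackJSON(lex)
//	if err != nil {
//		// handle the malformed pipe
//	}
//	s := pu.String() // canonical string form of the parsed pipe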