2024-05-19 19:25:52 +00:00
|
|
|
package logstorage
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"unsafe"
|
|
|
|
)
|
|
|
|
|
|
|
|
// pipeUnpackJSON processes '| unpack_json ...' pipe.
//
// See https://docs.victoriametrics.com/victorialogs/logsql/#unpack_json-pipe
type pipeUnpackJSON struct {
	// fromField is the field containing the JSON object to unpack; defaults to "_msg" (see parsePipeUnpackJSON).
	fromField string

	// resultPrefix is prepended to the names of the unpacked fields; empty means no prefix.
	resultPrefix string
}
|
|
|
|
|
|
|
|
func (pu *pipeUnpackJSON) String() string {
|
|
|
|
s := "unpack_json"
|
|
|
|
if !isMsgFieldName(pu.fromField) {
|
|
|
|
s += " from " + quoteTokenIfNeeded(pu.fromField)
|
|
|
|
}
|
|
|
|
if pu.resultPrefix != "" {
|
2024-05-19 21:27:52 +00:00
|
|
|
s += " result_prefix " + quoteTokenIfNeeded(pu.resultPrefix)
|
2024-05-19 19:25:52 +00:00
|
|
|
}
|
|
|
|
return s
|
|
|
|
}
|
|
|
|
|
|
|
|
func (pu *pipeUnpackJSON) updateNeededFields(neededFields, unneededFields fieldsSet) {
|
|
|
|
if neededFields.contains("*") {
|
|
|
|
unneededFields.remove(pu.fromField)
|
|
|
|
} else {
|
|
|
|
neededFields.add(pu.fromField)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-05-19 19:28:11 +00:00
|
|
|
func (pu *pipeUnpackJSON) newPipeProcessor(workersCount int, _ <-chan struct{}, _ func(), ppBase pipeProcessor) pipeProcessor {
|
2024-05-19 19:25:52 +00:00
|
|
|
shards := make([]pipeUnpackJSONProcessorShard, workersCount)
|
|
|
|
|
|
|
|
pup := &pipeUnpackJSONProcessor{
|
|
|
|
pu: pu,
|
|
|
|
ppBase: ppBase,
|
|
|
|
|
|
|
|
shards: shards,
|
|
|
|
}
|
|
|
|
return pup
|
|
|
|
}
|
|
|
|
|
|
|
|
// pipeUnpackJSONProcessor executes pipeUnpackJSON over incoming blocks.
type pipeUnpackJSONProcessor struct {
	// pu is the pipe configuration being executed.
	pu *pipeUnpackJSON
	// ppBase receives the resulting blocks with unpacked fields appended.
	ppBase pipeProcessor

	// shards holds per-worker state; indexed by workerID in writeBlock.
	shards []pipeUnpackJSONProcessorShard
}
|
|
|
|
|
|
|
|
// pipeUnpackJSONProcessorShard is per-worker state, padded to avoid false sharing
// between adjacent shards in the shards slice.
type pipeUnpackJSONProcessorShard struct {
	pipeUnpackJSONProcessorShardNopad

	// The padding prevents false sharing on widespread platforms with 128 mod (cache line size) = 0 .
	_ [128 - unsafe.Sizeof(pipeUnpackJSONProcessorShardNopad{})%128]byte
}
|
|
|
|
|
|
|
|
// pipeUnpackJSONProcessorShardNopad holds the actual shard state; it is embedded
// in pipeUnpackJSONProcessorShard, which adds anti-false-sharing padding.
type pipeUnpackJSONProcessorShardNopad struct {
	// jsonParser parses the JSON values from fromField; reset after each processed block.
	jsonParser JSONParser

	// rcs accumulates the output columns (source columns plus unpacked fields).
	rcs []resultColumn
	// br is the scratch block used to push rcs to ppBase in flush().
	br blockResult

	// valuesLen tracks the total byte length of buffered values;
	// once it reaches 1_000_000, the buffered rows are flushed (see writeRow).
	valuesLen int
}
|
|
|
|
|
|
|
|
func (shard *pipeUnpackJSONProcessorShard) writeRow(ppBase pipeProcessor, br *blockResult, cs []*blockResultColumn, rowIdx int, extraFields []Field) {
|
|
|
|
rcs := shard.rcs
|
|
|
|
|
|
|
|
areEqualColumns := len(rcs) == len(cs)+len(extraFields)
|
|
|
|
if areEqualColumns {
|
|
|
|
for i, f := range extraFields {
|
2024-05-19 21:27:52 +00:00
|
|
|
if rcs[len(cs)+i].name != f.Name {
|
2024-05-19 19:25:52 +00:00
|
|
|
areEqualColumns = false
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !areEqualColumns {
|
|
|
|
// send the current block to bbBase and construct a block with new set of columns
|
|
|
|
shard.flush(ppBase)
|
|
|
|
|
|
|
|
rcs = shard.rcs[:0]
|
|
|
|
for _, c := range cs {
|
|
|
|
rcs = appendResultColumnWithName(rcs, c.name)
|
|
|
|
}
|
|
|
|
for _, f := range extraFields {
|
|
|
|
rcs = appendResultColumnWithName(rcs, f.Name)
|
|
|
|
}
|
|
|
|
shard.rcs = rcs
|
|
|
|
}
|
|
|
|
|
|
|
|
for i, c := range cs {
|
|
|
|
v := c.getValueAtRow(br, rowIdx)
|
|
|
|
rcs[i].addValue(v)
|
|
|
|
shard.valuesLen += len(v)
|
|
|
|
}
|
|
|
|
for i, f := range extraFields {
|
|
|
|
v := f.Value
|
2024-05-19 21:27:52 +00:00
|
|
|
rcs[len(cs)+i].addValue(v)
|
2024-05-19 19:25:52 +00:00
|
|
|
shard.valuesLen += len(v)
|
|
|
|
}
|
|
|
|
if shard.valuesLen >= 1_000_000 {
|
|
|
|
shard.flush(ppBase)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (shard *pipeUnpackJSONProcessorShard) flush(ppBase pipeProcessor) {
|
|
|
|
rcs := shard.rcs
|
|
|
|
|
|
|
|
shard.valuesLen = 0
|
|
|
|
|
|
|
|
if len(rcs) == 0 {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Flush rcs to ppBase
|
|
|
|
br := &shard.br
|
|
|
|
br.setResultColumns(rcs)
|
|
|
|
ppBase.writeBlock(0, br)
|
|
|
|
br.reset()
|
|
|
|
for i := range rcs {
|
|
|
|
rcs[i].resetValues()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-05-20 00:41:03 +00:00
|
|
|
func (shard *pipeUnpackJSONProcessorShard) parseJSON(v, resultPrefix string) []Field {
|
|
|
|
if len(v) == 0 || v[0] != '{' {
|
|
|
|
// This isn't a JSON object
|
|
|
|
return nil
|
2024-05-19 19:25:52 +00:00
|
|
|
}
|
2024-05-20 00:41:03 +00:00
|
|
|
if err := shard.jsonParser.ParseLogMessageNoResetBuf(v, resultPrefix); err != nil {
|
|
|
|
// Cannot parse v
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
return shard.jsonParser.Fields
|
2024-05-19 19:25:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func (pup *pipeUnpackJSONProcessor) writeBlock(workerID uint, br *blockResult) {
|
|
|
|
if len(br.timestamps) == 0 {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
resultPrefix := pup.pu.resultPrefix
|
|
|
|
shard := &pup.shards[workerID]
|
|
|
|
|
|
|
|
cs := br.getColumns()
|
|
|
|
c := br.getColumnByName(pup.pu.fromField)
|
|
|
|
if c.isConst {
|
|
|
|
v := c.valuesEncoded[0]
|
2024-05-20 00:41:03 +00:00
|
|
|
extraFields := shard.parseJSON(v, resultPrefix)
|
2024-05-19 19:25:52 +00:00
|
|
|
for rowIdx := range br.timestamps {
|
2024-05-20 00:41:03 +00:00
|
|
|
shard.writeRow(pup.ppBase, br, cs, rowIdx, extraFields)
|
2024-05-19 19:25:52 +00:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
values := c.getValues(br)
|
2024-05-20 00:41:03 +00:00
|
|
|
var extraFields []Field
|
2024-05-19 19:25:52 +00:00
|
|
|
for i, v := range values {
|
|
|
|
if i == 0 || values[i-1] != v {
|
2024-05-20 00:41:03 +00:00
|
|
|
extraFields = shard.parseJSON(v, resultPrefix)
|
2024-05-19 19:25:52 +00:00
|
|
|
}
|
2024-05-20 00:41:03 +00:00
|
|
|
shard.writeRow(pup.ppBase, br, cs, i, extraFields)
|
2024-05-19 19:25:52 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
shard.flush(pup.ppBase)
|
2024-05-20 00:41:03 +00:00
|
|
|
shard.jsonParser.reset()
|
2024-05-19 19:25:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func (pup *pipeUnpackJSONProcessor) flush() error {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func parsePipeUnpackJSON(lex *lexer) (*pipeUnpackJSON, error) {
|
|
|
|
if !lex.isKeyword("unpack_json") {
|
|
|
|
return nil, fmt.Errorf("unexpected token: %q; want %q", lex.token, "unpack_json")
|
|
|
|
}
|
|
|
|
lex.nextToken()
|
|
|
|
|
|
|
|
fromField := "_msg"
|
|
|
|
if lex.isKeyword("from") {
|
|
|
|
lex.nextToken()
|
|
|
|
f, err := parseFieldName(lex)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot parse 'from' field name: %w", err)
|
|
|
|
}
|
|
|
|
fromField = f
|
|
|
|
}
|
|
|
|
|
|
|
|
resultPrefix := ""
|
|
|
|
if lex.isKeyword("result_prefix") {
|
|
|
|
lex.nextToken()
|
|
|
|
p, err := getCompoundToken(lex)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot parse 'result_prefix': %w", err)
|
|
|
|
}
|
|
|
|
resultPrefix = p
|
|
|
|
}
|
|
|
|
|
|
|
|
pu := &pipeUnpackJSON{
|
|
|
|
fromField: fromField,
|
|
|
|
resultPrefix: resultPrefix,
|
|
|
|
}
|
|
|
|
return pu, nil
|
|
|
|
}
|