package csvimport

import (
	"fmt"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
	"github.com/VictoriaMetrics/metrics"

	"github.com/valyala/fastjson/fastfloat"
)

// Rows represents csv rows.
type Rows struct {
	// Rows contains parsed csv rows after the call to Unmarshal.
	Rows []Row

	// sc, tagsPool and metricsPool are reused between Unmarshal calls
	// in order to reduce memory allocations.
	sc          scanner
	tagsPool    []Tag
	metricsPool []metric
}

// Reset resets rs.
func (rs *Rows) Reset() {
	rows := rs.Rows
	for i := range rows {
		r := &rows[i]
		r.Metric = ""
		r.Tags = nil
		r.Value = 0
		r.Timestamp = 0
	}
	rs.Rows = rs.Rows[:0]

	rs.sc.Init("")

	tags := rs.tagsPool
	for i := range tags {
		t := &tags[i]
		t.Key = ""
		t.Value = ""
	}
	rs.tagsPool = rs.tagsPool[:0]

	metrics := rs.metricsPool
	for i := range metrics {
		m := &metrics[i]
		m.Name = ""
		m.Value = 0
	}
	rs.metricsPool = rs.metricsPool[:0]
}
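
// A typical reuse pattern (illustrative sketch; the sync.Pool shown below is an
// assumption about the caller, not something implemented in this file): callers
// may keep Rows objects in a pool and call Reset before returning them, so the
// underlying slices are reused instead of being reallocated on every request:
//
//	var rowsPool = sync.Pool{New: func() interface{} { return &Rows{} }}
//
//	rs := rowsPool.Get().(*Rows)
//	rs.Unmarshal(reqBody, cds)
//	// ... process rs.Rows ...
//	rs.Reset()
//	rowsPool.Put(rs)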

// Row represents a single metric row.
type Row struct {
	Metric    string
	Tags      []Tag
	Value     float64
	Timestamp int64
}

// Tag represents a metric tag.
type Tag struct {
	Key   string
	Value string
}

type metric struct {
	Name  string
	Value float64
}

// Unmarshal unmarshals csv lines from s according to the given cds.
func (rs *Rows) Unmarshal(s string, cds []ColumnDescriptor) {
	rs.sc.Init(s)
	rs.Rows, rs.tagsPool, rs.metricsPool = parseRows(&rs.sc, rs.Rows[:0], rs.tagsPool[:0], rs.metricsPool[:0], cds)
}
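
// Usage sketch (illustrative only): the ColumnDescriptor values below are built by
// hand and the timestamp parser is an assumption; in VictoriaMetrics the descriptors
// normally come from the `format` query arg of /api/v1/import/csv.
//
//	cds := []ColumnDescriptor{
//		{TagName: "city"},
//		{MetricName: "temperature"},
//		{ParseTimestamp: func(s string) (int64, error) {
//			return fastfloat.ParseInt64(s) // unix timestamp in milliseconds
//		}},
//	}
//	var rs Rows
//	rs.Unmarshal("London,10.5,1594568424000\nParis,11.5,1594568424000", cds)
//	// rs.Rows now contains two rows for the metric "temperature" with
//	// tags city=London and city=Paris and the given timestamps.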

func parseRows(sc *scanner, dst []Row, tags []Tag, metrics []metric, cds []ColumnDescriptor) ([]Row, []Tag, []metric) {
	for sc.NextLine() {
		line := sc.Line
		var r Row
		col := uint(0)
		metrics = metrics[:0]
		tagsLen := len(tags)
		for sc.NextColumn() {
			if col >= uint(len(cds)) {
				// Skip superfluous column.
				continue
			}
			cd := &cds[col]
			col++
			if cd.isEmpty() || sc.Column == "" {
				// Ignore empty column.
				continue
			}
			if parseTimestamp := cd.ParseTimestamp; parseTimestamp != nil {
				timestamp, err := parseTimestamp(sc.Column)
				if err != nil {
					sc.Error = fmt.Errorf("cannot parse timestamp from %q: %w", sc.Column, err)
					break
				}
				r.Timestamp = timestamp
				continue
			}
			if tagName := cd.TagName; tagName != "" {
				tags = append(tags, Tag{
					Key:   tagName,
					Value: sc.Column,
				})
				continue
			}
			metricName := cd.MetricName
			if metricName == "" {
				logger.Panicf("BUG: unexpected empty MetricName")
			}
			value, err := fastfloat.Parse(sc.Column)
			if err != nil {
				sc.Error = fmt.Errorf("cannot parse metric value for %q from %q: %w", metricName, sc.Column, err)
			}
			metrics = append(metrics, metric{
				Name:  metricName,
				Value: value,
			})
		}
		if col < uint(len(cds)) && sc.Error == nil {
			sc.Error = fmt.Errorf("missing columns in the csv line %q; got %d columns; want at least %d columns", line, col, len(cds))
		}
		if sc.Error != nil {
			logger.Errorf("error when parsing csv line %q: %s; skipping this line", line, sc.Error)
			invalidLines.Inc()
			continue
		}
		if len(metrics) == 0 {
			// The line contains no metric values - skip it silently.
			continue
		}
		r.Metric = metrics[0].Name
		r.Tags = tags[tagsLen:]
		r.Value = metrics[0].Value
		dst = append(dst, r)
		// Additional metric columns become separate rows sharing the same tags and timestamp.
		for _, m := range metrics[1:] {
			dst = append(dst, Row{
				Metric:    m.Name,
				Tags:      r.Tags,
				Value:     m.Value,
				Timestamp: r.Timestamp,
			})
		}
	}
	return dst, tags, metrics
}
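
// Note on multi-metric lines (behavior of parseRows above, shown on a made-up
// example): when cds maps several columns to metric names, a single csv line
// produces one Row per metric column, all sharing the same tags and timestamp.
// For example, with a descriptor set where column 1 is the tag `ticker` and
// columns 2 and 3 are the metrics `bid` and `ask`, the line
//
//	MSFT,1.23,1.24
//
// yields the rows bid{ticker="MSFT"} = 1.23 and ask{ticker="MSFT"} = 1.24.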

// invalidLines counts csv lines that couldn't be parsed.
var invalidLines = metrics.NewCounter(`vm_rows_invalid_total{type="csvimport"}`)