Revert "lib/protoparser: fix skip csv line when metric can be collect from the line (#4298)"

This reverts commit 410ae99c2e.

Reason for revert: the commit masks the real issue instead of fixing it.
The real issue is that scanner.NextColumn() skips the last column of a CSV line when that column is empty.
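
For illustration, here is a minimal, self-contained Go sketch of that failure mode. The buggyScanner type below is a hypothetical stand-in that only mimics the shape of the csvimport column scanner; it is not the real VictoriaMetrics code:

```go
package main

import (
	"fmt"
	"strings"
)

// buggyScanner is a hypothetical, simplified stand-in for the csvimport
// column scanner. It reproduces the failure mode described above:
// NextColumn reports end-of-line as soon as the remaining input is empty,
// so a trailing empty column after the last comma is silently dropped.
type buggyScanner struct {
	s      string // remaining part of the current line
	Column string // the most recently scanned column
}

func (sc *buggyScanner) NextColumn() bool {
	if len(sc.s) == 0 {
		// Bug: after "a,b," a third (empty) column is still pending,
		// but this branch reports that there are no more columns.
		return false
	}
	if i := strings.IndexByte(sc.s, ','); i >= 0 {
		sc.Column, sc.s = sc.s[:i], sc.s[i+1:]
		return true
	}
	sc.Column, sc.s = sc.s, ""
	return true
}

func main() {
	sc := &buggyScanner{s: "1677632461449998000,1,"}
	n := 0
	for sc.NextColumn() {
		n++
		fmt.Printf("column %d: %q\n", n, sc.Column)
	}
	fmt.Println("columns scanned:", n) // prints 2, although the line has 3 columns
}
```

A correct scanner has to remember whether the previous column ended with a separator, so that a trailing comma still yields one final empty column.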

The commit also introduces two bugs (both are shown in the sketch right after this list):

- a panic if all the metric values in a CSV line are empty
- silent import of CSV lines with too few columns
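
The sketch below condenses the guard logic that the reverted commit added to parseRows (see the diff further down) into a hypothetical validateRow helper; it is not the actual function, but it shows where both bugs come from:

```go
package main

import "fmt"

// validateRow is a hypothetical condensation of the guard logic added by the
// reverted commit; the real code lives in parseRows in lib/protoparser/csvimport.
func validateRow(col, wantCols int, metricValues []float64, line string) error {
	// With the extra len(metricValues) == 0 condition, a line that is too
	// short but still produced at least one metric value no longer triggers
	// this error and is imported silently.
	if col < wantCols && len(metricValues) == 0 {
		return fmt.Errorf("missing columns in the csv line %q; got %d columns; want at least %d columns", line, col, wantCols)
	}
	// A well-formed line whose metric values are all empty reaches this
	// point with no parsed metrics and panics, crashing the import instead
	// of just skipping the line.
	if len(metricValues) == 0 {
		panic(fmt.Errorf("BUG: expecting at least a single metric in the csv line %q", line))
	}
	return nil
}

func main() {
	// Too few columns, but one metric value was parsed: returns nil,
	// i.e. the line would be imported silently.
	fmt.Println(validateRow(2, 3, []float64{1}, "1677632461449998000,1"))

	// All metric values in the line are empty: the call panics and
	// terminates the program.
	fmt.Println(validateRow(3, 3, nil, ",,1677632461449998000"))
}
```

Reverting restores the unconditional missing-columns check and removes the panic, so a bad line increments invalidLines and is skipped instead of crashing the process.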

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4048
See https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4298
Aliaksandr Valialkin 2023-05-12 15:22:27 -07:00
parent ff39df74d3
commit 6fd39e2000
3 changed files with 4 additions and 39 deletions

@@ -74,7 +74,6 @@ The following tip changes can be tested by building VictoriaMetrics components f
 * BUGFIX: [vmbackup](https://docs.victoriametrics.com/vmbackup.html): fix compatibility with Windows OS. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/70).
 * BUGFIX: [vmctl](https://docs.victoriametrics.com/vmctl.html): fix performance issue when migrating data from VictoriaMetrics according to [these docs](https://docs.victoriametrics.com/vmctl.html#migrating-data-from-victoriametrics). Add the ability to speed up the data migration via `--vm-native-disable-retries` command-line flag. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4092).
 * BUGFIX: [stream aggregation](https://docs.victoriametrics.com/stream-aggregation.html): fix bug with duplicated labels during stream aggregation via single-node VictoriaMetrics. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4277).
-* BUGFIX: [csvimport](https://docs.victoriametrics.com/#how-to-import-csv-data): properly parse [csv line](https://docs.victoriametrics.com/#how-to-import-csv-data) when value in the last column is empty. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4048).
 * BUGFIX: [relabeling](https://docs.victoriametrics.com/relabeling.html): properly validate labels input on Metric Relabel Debug page in [VMUI](https://docs.victoriametrics.com/#vmui). See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4284).
 ## [v1.90.0](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.90.0)

@@ -118,8 +118,7 @@ func parseRows(sc *scanner, dst []Row, tags []Tag, metrics []metric, cds []Colum
 				Value: value,
 			})
 		}
-		if col < uint(len(cds)) && sc.Error == nil && len(metrics) == 0 {
+		if col < uint(len(cds)) && sc.Error == nil {
 			sc.Error = fmt.Errorf("missing columns in the csv line %q; got %d columns; want at least %d columns", line, col, len(cds))
 		}
 		if sc.Error != nil {
@@ -127,6 +126,9 @@ func parseRows(sc *scanner, dst []Row, tags []Tag, metrics []metric, cds []Colum
 			invalidLines.Inc()
 			continue
 		}
-		if len(metrics) == 0 {
-			logger.Panicf("BUG: expecting at least a single metric in columnDescriptors=%#v", cds)
-		}
 		r.Metric = metrics[0].Name
 		r.Tags = tags[tagsLen:]
 		r.Value = metrics[0].Value

@@ -55,42 +55,6 @@ func TestRowsUnmarshalSuccess(t *testing.T) {
 			t.Fatalf("unexpected rows on the second unmarshal;\ngot\n%v\nwant\n%v", rs.Rows, rowsExpected)
 		}
 	}
-	f("1:label:mytest,2:time:unix_ns,3:metric:metric_1,4:metric:metric_2", "test,1677632461449998000,,", nil)
-	f("1:time:unix_ns,2:metric:metric_1,3:metric:metric_2", "1677632461449998000,,", nil)
-	f("1:time:unix_ns,2:metric:metric_1,3:metric:metric_2", "1677632461449998000,1,", []Row{
-		{
-			Metric:    "metric_1",
-			Value:     1,
-			Timestamp: 1677632461449,
-		},
-	})
-	f("1:time:unix_ns,2:metric:metric_1,3:metric:metric_2",
-		`
-1677632461449998000,1,1
-1677633061449998000,1,
-1677633661449998000,,1`, []Row{
-		{
-			Metric:    "metric_1",
-			Value:     1,
-			Timestamp: 1677632461449,
-		},
-		{
-			Metric:    "metric_2",
-			Value:     1,
-			Timestamp: 1677632461449,
-		},
-		{
-			Metric:    "metric_1",
-			Value:     1,
-			Timestamp: 1677633061449,
-		},
-		{
-			Metric:    "metric_2",
-			Value:     1,
-			Timestamp: 1677633661449,
-		},
-	})
 	f("1:metric:foo", "", nil)
 	f("1:metric:foo", `123`, []Row{
 		{