VictoriaMetrics/lib/promscrape/scrapework_test.go
Aliaksandr Valialkin bb00bae353
Revert "Exemplar support (#5982)"
This reverts commit 5a3abfa041.

Reason for revert: exemplars aren't in wide use because they have numerous issues which prevent their adoption (see below).
Adding support for exemplars into VictoriaMetrics introduces non-trivial code changes. These code changes need to be supported forever
once the release of VictoriaMetrics with exemplar support is published. That's why I don't think this is a good feature despite
that the source code of the reverted commit has an excellent quality. See https://docs.victoriametrics.com/goals/ .

Issues with Prometheus exemplars:

- Prometheus still has only experimental support for exemplars after more than three years since they were introduced.
  It stores exemplars in memory, so they are lost after Prometheus restart. This doesn't look like production-ready feature.
  See 0a2f3b3794/content/docs/instrumenting/exposition_formats.md (L153-L159)
  and https://prometheus.io/docs/prometheus/latest/feature_flags/#exemplars-storage

- It is very non-trivial to expose exemplars alongside metrics in your application, since the official Prometheus SDKs
  for metrics' exposition ( https://prometheus.io/docs/instrumenting/clientlibs/ ) either have very hard-to-use API
  for exposing histograms or do not have this API at all. For example, try figuring out how to expose exemplars
  via https://pkg.go.dev/github.com/prometheus/client_golang@v1.19.1/prometheus .

- It looks like exemplars are supported for Histogram metric types only -
  see https://pkg.go.dev/github.com/prometheus/client_golang@v1.19.1/prometheus#Timer.ObserveDurationWithExemplar .
  Exemplars aren't supported for Counter, Gauge and Summary metric types.

- Grafana has very poor support for Prometheus exemplars. It looks like it supports exemplars only when the query
  contains histogram_quantile() function. It queries exemplars via special Prometheus API -
  https://prometheus.io/docs/prometheus/latest/querying/api/#querying-exemplars - (which is still marked as experimental, btw.)
  and then displays all the returned exemplars on the graph as special dots. The issue is that this doesn't work
  in production in most cases when the histogram_quantile() is calculated over thousands of histogram buckets
  exposed by big number of application instances. Every histogram bucket may expose an exemplar on every timestamp shown on the graph.
  This makes the graph unusable, since it is literally filled with thousands of exemplar dots.
  Neither the Prometheus API nor Grafana provides the ability to filter out unneeded exemplars.

- Exemplars are usually connected to traces. While traces are good for some

I doubt exemplars will become production-ready in the near future because of the issues outlined above.

Alternative to exemplars:

Exemplars are marketed as a silver bullet for the correlation between metrics, traces and logs -
just click the exemplar dot on some graph in Grafana and instantly see the corresponding trace or log entry!
This doesn't work as expected in production as shown above. Are there better solutions, which work in production?
Yes - just use time-based and label-based correlation between metrics, traces and logs. Assign the same `job`
and `instance` labels to metrics, logs and traces, so you can quickly find the needed trace or log entry
by these labels on the time range with the anomaly on metrics' graph.
2024-07-03 15:30:21 +02:00

878 lines
26 KiB
Go

package promscrape
import (
"fmt"
"strings"
"testing"
"time"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/auth"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/bytesutil"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promutils"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/protoparser/common"
parser "github.com/VictoriaMetrics/VictoriaMetrics/lib/protoparser/prometheus"
)
// TestIsAutoMetric verifies that isAutoMetric reports true only for the
// automatically generated scrape metrics (`up` and the `scrape_*` series)
// and false for everything else, including `exported_`-prefixed names.
func TestIsAutoMetric(t *testing.T) {
	cases := []struct {
		metric string
		want   bool
	}{
		{"up", true},
		{"scrape_duration_seconds", true},
		{"scrape_samples_scraped", true},
		{"scrape_samples_post_metric_relabeling", true},
		{"scrape_series_added", true},
		{"scrape_timeout_seconds", true},
		{"scrape_samples_limit", true},
		{"scrape_series_limit_samples_dropped", true},
		{"scrape_series_limit", true},
		{"scrape_series_current", true},
		{"foobar", false},
		{"exported_up", false},
		{"upx", false},
	}
	for _, c := range cases {
		if got := isAutoMetric(c.metric); got != c.want {
			t.Fatalf("unexpected result for isAutoMetric(%q); got %v; want %v", c.metric, got, c.want)
		}
	}
}
// TestAppendExtraLabels verifies how appendExtraLabels merges the extra (target)
// labels into the source (scraped) labels depending on honorLabels:
// with honorLabels=true the source labels win on conflict, while with
// honorLabels=false the conflicting source labels get the `exported_` prefix.
func TestAppendExtraLabels(t *testing.T) {
f := func(sourceLabels, extraLabels string, honorLabels bool, resultExpected string) {
t.Helper()
src := promutils.MustNewLabelsFromString(sourceLabels)
extra := promutils.MustNewLabelsFromString(extraLabels)
var labels promutils.Labels
labels.Labels = appendExtraLabels(src.GetLabels(), extra.GetLabels(), 0, honorLabels)
result := labels.String()
if result != resultExpected {
t.Fatalf("unexpected result; got\n%s\nwant\n%s", result, resultExpected)
}
}
// Empty inputs.
f("{}", "{}", true, "{}")
f("{}", "{}", false, "{}")
// __name__ conflicts: with honorLabels=false the extra __name__ wins
// and the source name is renamed to exported___name__.
f("foo", "{}", true, `{__name__="foo"}`)
f("foo", "{}", false, `{__name__="foo"}`)
f("foo", "bar", true, `{__name__="foo"}`)
f("foo", "bar", false, `{exported___name__="foo",__name__="bar"}`)
// Plain label merging and single-label conflicts.
f(`{a="b"}`, `{c="d"}`, true, `{a="b",c="d"}`)
f(`{a="b"}`, `{c="d"}`, false, `{a="b",c="d"}`)
f(`{a="b"}`, `{a="d"}`, true, `{a="b"}`)
f(`{a="b"}`, `{a="d"}`, false, `{exported_a="b",a="d"}`)
// Chained `exported_` prefixes when the source already contains exported_* labels.
f(`{a="b",exported_a="x"}`, `{a="d"}`, true, `{a="b",exported_a="x"}`)
f(`{a="b",exported_a="x"}`, `{a="d"}`, false, `{exported_a="b",exported_exported_a="x",a="d"}`)
f(`{a="b"}`, `{a="d",exported_a="x"}`, true, `{a="b",exported_a="x"}`)
f(`{a="b"}`, `{a="d",exported_a="x"}`, false, `{exported_exported_a="b",a="d",exported_a="x"}`)
f(`{foo="a",exported_foo="b"}`, `{exported_foo="c"}`, true, `{foo="a",exported_foo="b"}`)
f(`{foo="a",exported_foo="b"}`, `{exported_foo="c"}`, false, `{foo="a",exported_exported_foo="b",exported_foo="c"}`)
f(`{foo="a",exported_foo="b"}`, `{exported_foo="c",bar="x"}`, true, `{foo="a",exported_foo="b",bar="x"}`)
f(`{foo="a",exported_foo="b"}`, `{exported_foo="c",bar="x"}`, false, `{foo="a",exported_exported_foo="b",exported_foo="c",bar="x"}`)
f(`{foo="a",exported_foo="b"}`, `{exported_foo="c",foo="d"}`, true, `{foo="a",exported_foo="b"}`)
f(`{foo="a",exported_foo="b"}`, `{exported_foo="c",foo="d"}`, false, `{exported_foo="a",exported_exported_foo="b",exported_foo="c",foo="d"}`)
}
// TestScrapeWorkScrapeInternalFailure verifies that a failed scrape
// (ReadData returning an error) still pushes the auto-generated metrics
// with up=0, and that ReadData and PushData each get called exactly once.
func TestScrapeWorkScrapeInternalFailure(t *testing.T) {
// Expected auto metrics for a failed scrape: up=0 and zeroed scrape stats.
dataExpected := `
up 0 123
scrape_samples_scraped 0 123
scrape_response_size_bytes 0 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 0 123
scrape_series_added 0 123
scrape_timeout_seconds 42 123
`
timeseriesExpected := parseData(dataExpected)
var sw scrapeWork
sw.Config = &ScrapeWork{
ScrapeTimeout: time.Second * 42,
}
// Simulate a scrape failure.
readDataCalls := 0
sw.ReadData = func(_ *bytesutil.ByteBuffer) error {
readDataCalls++
return fmt.Errorf("error when reading data")
}
// Record the pushed series and compare them against the expected auto metrics.
// The error is stored instead of calling t.Fatalf directly, since PushData
// may be invoked from a different goroutine.
pushDataCalls := 0
var pushDataErr error
sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
if err := expectEqualTimeseries(wr.Timeseries, timeseriesExpected); err != nil {
pushDataErr = fmt.Errorf("unexpected data pushed: %w\ngot\n%#v\nwant\n%#v", err, wr.Timeseries, timeseriesExpected)
}
pushDataCalls++
}
timestamp := int64(123000)
tsmGlobal.Register(&sw)
if err := sw.scrapeInternal(timestamp, timestamp); err == nil {
t.Fatalf("expecting non-nil error")
}
tsmGlobal.Unregister(&sw)
if pushDataErr != nil {
t.Fatalf("unexpected error: %s", pushDataErr)
}
if readDataCalls != 1 {
t.Fatalf("unexpected number of readData calls; got %d; want %d", readDataCalls, 1)
}
if pushDataCalls != 1 {
t.Fatalf("unexpected number of pushData calls; got %d; want %d", pushDataCalls, 1)
}
}
// TestScrapeWorkScrapeInternalSuccess runs a scrape end-to-end over the given
// text-format response and verifies the pushed series (scraped metrics plus
// the auto-generated scrape_* metrics) against dataExpected.
// The cases cover HonorLabels/HonorTimestamps, metric relabeling,
// SampleLimit and SeriesLimit behavior.
func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
f := func(data string, cfg *ScrapeWork, dataExpected string) {
t.Helper()
timeseriesExpected := parseData(dataExpected)
var sw scrapeWork
sw.Config = cfg
// Serve data as the scrape response.
readDataCalls := 0
sw.ReadData = func(dst *bytesutil.ByteBuffer) error {
readDataCalls++
dst.B = append(dst.B, data...)
return nil
}
// Consume the expected series in the order they are pushed; the error is
// stored instead of failing immediately, since PushData may be invoked
// multiple times per scrape.
pushDataCalls := 0
var pushDataErr error
sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
pushDataCalls++
if len(wr.Timeseries) > len(timeseriesExpected) {
pushDataErr = fmt.Errorf("too many time series obtained; got %d; want %d\ngot\n%+v\nwant\n%+v",
len(wr.Timeseries), len(timeseriesExpected), wr.Timeseries, timeseriesExpected)
return
}
tsExpected := timeseriesExpected[:len(wr.Timeseries)]
timeseriesExpected = timeseriesExpected[len(tsExpected):]
if err := expectEqualTimeseries(wr.Timeseries, tsExpected); err != nil {
pushDataErr = fmt.Errorf("unexpected data pushed: %w\ngot\n%v\nwant\n%v", err, wr.Timeseries, tsExpected)
return
}
}
timestamp := int64(123000)
tsmGlobal.Register(&sw)
// sample_limit errors are expected for the SampleLimit cases below.
if err := sw.scrapeInternal(timestamp, timestamp); err != nil {
if !strings.Contains(err.Error(), "sample_limit") {
t.Fatalf("unexpected error: %s", err)
}
}
tsmGlobal.Unregister(&sw)
if pushDataErr != nil {
t.Fatalf("unexpected error: %s", pushDataErr)
}
if readDataCalls != 1 {
t.Fatalf("unexpected number of readData calls; got %d; want %d", readDataCalls, 1)
}
if pushDataCalls == 0 {
t.Fatalf("missing pushData calls")
}
if len(timeseriesExpected) != 0 {
t.Fatalf("%d series weren't pushed", len(timeseriesExpected))
}
}
// Empty scrape response - only the auto metrics are generated.
f(``, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
}, `
up 1 123
scrape_samples_scraped 0 123
scrape_response_size_bytes 0 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 0 123
scrape_series_added 0 123
scrape_timeout_seconds 42 123
`)
// Empty label values are dropped; sample timestamps are replaced with
// the scrape timestamp by default.
f(`
foo{bar="baz",empty_label=""} 34.45 3
abc -2
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
}, `
foo{bar="baz"} 34.45 123
abc -2 123
up 1 123
scrape_samples_scraped 2 123
scrape_response_size_bytes 51 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 2 123
scrape_series_added 2 123
scrape_timeout_seconds 42 123
`)
// HonorTimestamps=true keeps the original sample timestamps.
f(`
foo{bar="baz"} 34.45 3
abc -2
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorTimestamps: true,
Labels: promutils.NewLabelsFromMap(map[string]string{
"foo": "x",
}),
}, `
foo{bar="baz",foo="x"} 34.45 3
abc{foo="x"} -2 123
up{foo="x"} 1 123
scrape_samples_scraped{foo="x"} 2 123
scrape_response_size_bytes{foo="x"} 36 123
scrape_duration_seconds{foo="x"} 0 123
scrape_samples_post_metric_relabeling{foo="x"} 2 123
scrape_series_added{foo="x"} 2 123
scrape_timeout_seconds{foo="x"} 42 123
`)
// HonorLabels=false renames clashing scraped labels with the exported_ prefix.
f(`
foo{job="orig",bar="baz"} 34.45
bar{y="2",job="aa",a="b",x="1"} -3e4 2345
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: false,
Labels: promutils.NewLabelsFromMap(map[string]string{
"job": "override",
}),
}, `
foo{exported_job="orig",job="override",bar="baz"} 34.45 123
bar{exported_job="aa",job="override",x="1",a="b",y="2"} -3e4 123
up{job="override"} 1 123
scrape_samples_scraped{job="override"} 2 123
scrape_response_size_bytes{job="override"} 80 123
scrape_duration_seconds{job="override"} 0 123
scrape_samples_post_metric_relabeling{job="override"} 2 123
scrape_series_added{job="override"} 2 123
scrape_timeout_seconds{job="override"} 42 123
`)
// Empty instance override. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/453
f(`
no_instance{instance="",job="some_job",label="val1",test=""} 5555
test_with_instance{instance="some_instance",job="some_job",label="val2",test=""} 1555
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: promutils.NewLabelsFromMap(map[string]string{
"instance": "foobar",
"job": "xxx",
}),
}, `
no_instance{job="some_job",label="val1"} 5555 123
test_with_instance{instance="some_instance",job="some_job",label="val2"} 1555 123
up{instance="foobar",job="xxx"} 1 123
scrape_samples_scraped{instance="foobar",job="xxx"} 2 123
scrape_response_size_bytes{instance="foobar",job="xxx"} 158 123
scrape_duration_seconds{instance="foobar",job="xxx"} 0 123
scrape_samples_post_metric_relabeling{instance="foobar",job="xxx"} 2 123
scrape_series_added{instance="foobar",job="xxx"} 2 123
scrape_timeout_seconds{instance="foobar",job="xxx"} 42 123
`)
// The same input with HonorLabels=false - target labels win on conflict.
f(`
no_instance{instance="",job="some_job",label="val1",test=""} 5555
test_with_instance{instance="some_instance",job="some_job",label="val2",test=""} 1555
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: false,
Labels: promutils.NewLabelsFromMap(map[string]string{
"instance": "foobar",
"job": "xxx",
}),
}, `
no_instance{exported_job="some_job",instance="foobar",job="xxx",label="val1"} 5555 123
test_with_instance{exported_instance="some_instance",exported_job="some_job",instance="foobar",job="xxx",label="val2"} 1555 123
up{instance="foobar",job="xxx"} 1 123
scrape_samples_scraped{instance="foobar",job="xxx"} 2 123
scrape_response_size_bytes{instance="foobar",job="xxx"} 158 123
scrape_duration_seconds{instance="foobar",job="xxx"} 0 123
scrape_samples_post_metric_relabeling{instance="foobar",job="xxx"} 2 123
scrape_series_added{instance="foobar",job="xxx"} 2 123
scrape_timeout_seconds{instance="foobar",job="xxx"} 42 123
`)
// HonorLabels=true keeps the original scraped labels on conflict.
f(`
foo{job="orig",bar="baz"} 34.45
bar{job="aa",a="b"} -3e4 2345
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: promutils.NewLabelsFromMap(map[string]string{
"job": "override",
}),
}, `
foo{job="orig",bar="baz"} 34.45 123
bar{job="aa",a="b"} -3e4 123
up{job="override"} 1 123
scrape_samples_scraped{job="override"} 2 123
scrape_response_size_bytes{job="override"} 68 123
scrape_duration_seconds{job="override"} 0 123
scrape_samples_post_metric_relabeling{job="override"} 2 123
scrape_series_added{job="override"} 2 123
scrape_timeout_seconds{job="override"} 42 123
`)
// MetricRelabelConfigs are applied to the scraped metrics.
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: promutils.NewLabelsFromMap(map[string]string{
"job": "xx",
"__address__": "foo.com",
}),
MetricRelabelConfigs: mustParseRelabelConfigs(`
- action: replace
  source_labels: ["__address__", "job"]
  separator: "/"
  target_label: "instance"
- action: labeldrop
  regex: c
`),
}, `
foo{bar="baz",job="xx",instance="foo.com/xx"} 34.44 123
bar{a="b",job="xx",instance="foo.com/xx"} -3e4 123
up{job="xx"} 1 123
scrape_samples_scraped{job="xx"} 2 123
scrape_response_size_bytes{job="xx"} 49 123
scrape_duration_seconds{job="xx"} 0 123
scrape_samples_post_metric_relabeling{job="xx"} 2 123
scrape_series_added{job="xx"} 2 123
scrape_timeout_seconds{job="xx"} 42 123
`)
// Metrics dropped by relabeling are reflected in
// scrape_samples_post_metric_relabeling; auto metrics aren't relabeled.
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d",} -3e4
dropme{foo="bar"} 334
dropme{xxx="yy",ss="dsf"} 843
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: promutils.NewLabelsFromMap(map[string]string{
"job": "xx",
"instance": "foo.com",
}),
MetricRelabelConfigs: mustParseRelabelConfigs(`
- action: drop
  separator: ""
  source_labels: [a, c]
  regex: "^bd$"
- action: drop
  source_labels: [__name__]
  regex: "dropme|up"
`),
}, `
foo{bar="baz",job="xx",instance="foo.com"} 34.44 123
up{job="xx",instance="foo.com"} 1 123
scrape_samples_scraped{job="xx",instance="foo.com"} 4 123
scrape_response_size_bytes{job="xx",instance="foo.com"} 106 123
scrape_duration_seconds{job="xx",instance="foo.com"} 0 123
scrape_samples_post_metric_relabeling{job="xx",instance="foo.com"} 1 123
scrape_series_added{job="xx",instance="foo.com"} 4 123
scrape_timeout_seconds{job="xx",instance="foo.com"} 42 123
`)
// Scrape metrics with names clashing with auto metrics
// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3406
f(`
up{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
scrape_series_added 3.435
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
}, `
up{bar="baz"} 34.44 123
bar{a="b",c="d"} -3e4 123
exported_scrape_series_added 3.435 123
up 1 123
scrape_duration_seconds 0 123
scrape_response_size_bytes 76 123
scrape_samples_scraped 3 123
scrape_samples_post_metric_relabeling 3 123
scrape_timeout_seconds 42 123
scrape_series_added 3 123
`)
// With HonorLabels=true the clashing scraped names are kept as-is.
f(`
up{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
scrape_series_added 3.435
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
}, `
up{bar="baz"} 34.44 123
bar{a="b",c="d"} -3e4 123
scrape_series_added 3.435 123
up 1 123
scrape_samples_scraped 3 123
scrape_response_size_bytes 76 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 3 123
scrape_series_added 3 123
scrape_timeout_seconds 42 123
`)
// Scrape success with the given SampleLimit.
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
SampleLimit: 2,
}, `
foo{bar="baz"} 34.44 123
bar{a="b",c="d"} -3e4 123
up 1 123
scrape_samples_limit 2 123
scrape_response_size_bytes 49 123
scrape_samples_scraped 2 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 2 123
scrape_series_added 2 123
scrape_timeout_seconds 42 123
`)
// Scrape failure because of the exceeded SampleLimit
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
SampleLimit: 1,
SeriesLimit: 123,
}, `
up 0 123
scrape_samples_scraped 2 123
scrape_response_size_bytes 0 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 2 123
scrape_samples_limit 1 123
scrape_series_added 0 123
scrape_series_current 0 123
scrape_series_limit 123 123
scrape_series_limit_samples_dropped 0 123
scrape_timeout_seconds 42 123
`)
// Scrape success with the given SeriesLimit.
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
SeriesLimit: 123,
}, `
foo{bar="baz"} 34.44 123
bar{a="b",c="d"} -3e4 123
up 1 123
scrape_samples_scraped 2 123
scrape_response_size_bytes 49 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 2 123
scrape_series_added 2 123
scrape_series_current 2 123
scrape_series_limit 123 123
scrape_series_limit_samples_dropped 0 123
scrape_timeout_seconds 42 123
`)
// Exceed SeriesLimit.
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
SeriesLimit: 1,
}, `
foo{bar="baz"} 34.44 123
up 1 123
scrape_samples_scraped 2 123
scrape_response_size_bytes 49 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 2 123
scrape_series_added 2 123
scrape_series_current 1 123
scrape_series_limit 1 123
scrape_series_limit_samples_dropped 1 123
scrape_timeout_seconds 42 123
`)
}
// TestAddRowToTimeseriesNoRelabeling verifies how addRowToTimeseries combines
// the parsed row's labels with the target Labels and ExternalLabels for all
// combinations of HonorLabels and label conflicts, with relabeling disabled.
func TestAddRowToTimeseriesNoRelabeling(t *testing.T) {
f := func(row string, cfg *ScrapeWork, dataExpected string) {
t.Helper()
sw := scrapeWork{
Config: cfg,
}
var wc writeRequestCtx
r := parsePromRow(row)
// needsRelabel=false - only label merging is exercised here.
sw.addRowToTimeseries(&wc, r, r.Timestamp, false)
tss := wc.writeRequest.Timeseries
tssExpected := parseData(dataExpected)
if err := expectEqualTimeseries(tss, tssExpected); err != nil {
t.Fatalf("%s\ngot\n%v\nwant\n%v", err, tss, tssExpected)
}
}
// HonorLabels=false, empty Labels and ExternalLabels
f(`metric 0 123`,
&ScrapeWork{
HonorLabels: false,
},
`metric 0 123`)
f(`metric{a="f"} 0 123`,
&ScrapeWork{
HonorLabels: false,
},
`metric{a="f"} 0 123`)
// HonorLabels=true, empty Labels and ExternalLabels
f(`metric 0 123`,
&ScrapeWork{
HonorLabels: true,
},
`metric 0 123`)
f(`metric{a="f"} 0 123`,
&ScrapeWork{
HonorLabels: true,
},
`metric{a="f"} 0 123`)
// HonorLabels=false, non-empty Labels
f(`metric 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",foo="bar"} 0 123`)
// HonorLabels=true, non-empty Labels
f(`metric 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="f"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="f",foo="bar"} 0 123`)
// HonorLabels=false, non-empty ExternalLabels
f(`metric 0 123`,
&ScrapeWork{
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",foo="bar"} 0 123`)
// HonorLabels=true, non-empty ExternalLabels
f(`metric 0 123`,
&ScrapeWork{
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="f"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="f",foo="bar"} 0 123`)
// HonorLabels=false, non-empty Labels and ExternalLabels
f(`metric 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"x": "y",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",x="y"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"x": "y",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",foo="bar",x="y"} 0 123`)
// HonorLabels=true, non-empty Labels and ExternalLabels
f(`metric 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"x": "y",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="f",x="y"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"x": "y",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="f",foo="bar",x="y"} 0 123`)
// HonorLabels=false, clashing Labels and metric label
f(`metric{a="b"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",exported_a="b"} 0 123`)
// HonorLabels=true, clashing Labels and metric label
f(`metric{a="b"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="b"} 0 123`)
// HonorLabels=false, clashing ExternalLabels and metric label
f(`metric{a="b"} 0 123`,
&ScrapeWork{
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",exported_a="b"} 0 123`)
// HonorLabels=true, clashing ExternalLabels and metric label
f(`metric{a="b"} 0 123`,
&ScrapeWork{
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="b"} 0 123`)
// HonorLabels=false, clashing Labels and ExternalLAbels
f(`metric 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "e",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",exported_a="e"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "e",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: false,
},
`metric{a="f",foo="bar",exported_a="e"} 0 123`)
// HonorLabels=true, clashing Labels and ExternalLAbels
f(`metric 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "e",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="e"} 0 123`)
f(`metric{foo="bar"} 0 123`,
&ScrapeWork{
Labels: promutils.NewLabelsFromMap(map[string]string{
"a": "e",
}),
ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
"a": "f",
}),
HonorLabels: true,
},
`metric{a="e",foo="bar"} 0 123`)
}
// TestSendStaleSeries verifies the number of stale markers produced by
// sendStaleSeries when comparing the previous scrape response (lastScrape)
// with the current one (currScrape): one marker per series that disappeared.
func TestSendStaleSeries(t *testing.T) {
f := func(lastScrape, currScrape string, staleMarksExpected int) {
t.Helper()
var sw scrapeWork
sw.Config = &ScrapeWork{
NoStaleMarkers: false,
}
// Unmarshal workers are started for the duration of the test run.
common.StartUnmarshalWorkers()
defer common.StopUnmarshalWorkers()
// Count every pushed series as a stale marker.
var staleMarks int
sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
staleMarks += len(wr.Timeseries)
}
sw.sendStaleSeries(lastScrape, currScrape, 0, false)
if staleMarks != staleMarksExpected {
t.Fatalf("unexpected number of stale marks; got %d; want %d", staleMarks, staleMarksExpected)
}
}
// generateScrape returns n distinct metrics in Prometheus text format.
generateScrape := func(n int) string {
w := strings.Builder{}
for i := 0; i < n; i++ {
w.WriteString(fmt.Sprintf("foo_%d 1\n", i))
}
return w.String()
}
f("", "", 0)
f(generateScrape(10), generateScrape(10), 0)
f(generateScrape(10), "", 10)
f("", generateScrape(10), 0)
f(generateScrape(10), generateScrape(3), 7)
f(generateScrape(3), generateScrape(10), 0)
f(generateScrape(20000), generateScrape(10), 19990)
}
// parsePromRow parses a single Prometheus text-format row from data.
// It panics if data cannot be parsed or contains anything other than
// exactly one row. For test code only.
func parsePromRow(data string) *parser.Row {
	var rows parser.Rows
	rows.UnmarshalWithErrLogger(data, func(s string) {
		panic(fmt.Errorf("unexpected error when unmarshaling Prometheus rows: %s", s))
	})
	if len(rows.Rows) != 1 {
		panic(fmt.Errorf("unexpected number of rows parsed from %q; got %d; want %d", data, len(rows.Rows), 1))
	}
	return &rows.Rows[0]
}
// parseData parses Prometheus text-format data and converts every row into
// a prompbmarshal.TimeSeries holding a single sample. The metric name becomes
// the leading __name__ label, followed by the row's tags in their parse order.
// It panics on parse errors. For test code only.
func parseData(data string) []prompbmarshal.TimeSeries {
	var rows parser.Rows
	rows.UnmarshalWithErrLogger(data, func(s string) {
		panic(fmt.Errorf("unexpected error when unmarshaling Prometheus rows: %s", s))
	})
	var tss []prompbmarshal.TimeSeries
	for _, row := range rows.Rows {
		// __name__ always goes first so that downstream helpers can key on it.
		labels := make([]prompbmarshal.Label, 0, len(row.Tags)+1)
		labels = append(labels, prompbmarshal.Label{
			Name:  "__name__",
			Value: row.Metric,
		})
		for _, tag := range row.Tags {
			labels = append(labels, prompbmarshal.Label{
				Name:  tag.Key,
				Value: tag.Value,
			})
		}
		tss = append(tss, prompbmarshal.TimeSeries{
			Labels: labels,
			Samples: []prompbmarshal.Sample{
				{
					Value:     row.Value,
					Timestamp: row.Timestamp,
				},
			},
		})
	}
	return tss
}
// expectEqualTimeseries returns nil if tss matches tssExpected after both are
// normalized via timeseriesToMap (labels sorted, scrape_duration_seconds
// zeroed); otherwise it returns an error describing the first mismatch found.
func expectEqualTimeseries(tss, tssExpected []prompbmarshal.TimeSeries) error {
	mGot, err := timeseriesToMap(tss)
	if err != nil {
		return fmt.Errorf("invalid generated timeseries: %w", err)
	}
	mWant, err := timeseriesToMap(tssExpected)
	if err != nil {
		return fmt.Errorf("invalid expected timeseries: %w", err)
	}
	if len(mGot) != len(mWant) {
		return fmt.Errorf("unexpected time series len; got %d; want %d", len(mGot), len(mWant))
	}
	// Compare the canonical string form of every expected series.
	for name, want := range mWant {
		if got := mGot[name]; got != want {
			return fmt.Errorf("unexpected timeseries %q;\ngot\n%s\nwant\n%s", name, got, want)
		}
	}
	return nil
}
// timeseriesToMap indexes the given time series by metric name (the __name__
// label value), mapping each name to the canonical string form produced by
// timeseriesToString. It returns an error if a series has no labels, doesn't
// start with __name__, or doesn't contain exactly one sample.
func timeseriesToMap(tss []prompbmarshal.TimeSeries) (map[string]string, error) {
	m := make(map[string]string, len(tss))
	for i := range tss {
		ts := &tss[i]
		switch {
		case len(ts.Labels) == 0:
			return nil, fmt.Errorf("unexpected empty labels for timeseries #%d; timeseries: %#v", i, ts)
		case len(ts.Samples) != 1:
			return nil, fmt.Errorf("unexpected number of samples for timeseries #%d; got %d; want %d", i, len(ts.Samples), 1)
		case ts.Labels[0].Name != "__name__":
			return nil, fmt.Errorf("unexpected first name for timeseries #%d; got %q; want %q", i, ts.Labels[0].Name, "__name__")
		}
		name := ts.Labels[0].Value
		if name == "scrape_duration_seconds" {
			// Reset scrape_duration_seconds value to 0, since it is non-deterministic
			ts.Samples[0].Value = 0
		}
		m[name] = timeseriesToString(ts)
	}
	return m, nil
}
// timeseriesToString returns a canonical text representation of ts: the
// sorted labels in curly braces followed by the single sample's value and
// timestamp. It sorts ts.Labels in place and panics if ts doesn't contain
// exactly one sample.
func timeseriesToString(ts *prompbmarshal.TimeSeries) string {
	promrelabel.SortLabels(ts.Labels)
	if len(ts.Samples) != 1 {
		panic(fmt.Errorf("expecting a single sample; got %d samples", len(ts.Samples)))
	}
	var sb strings.Builder
	sb.WriteByte('{')
	for i, label := range ts.Labels {
		if i > 0 {
			sb.WriteByte(',')
		}
		fmt.Fprintf(&sb, "%s=%q", label.Name, label.Value)
	}
	sb.WriteString("} ")
	sample := ts.Samples[0]
	fmt.Fprintf(&sb, "%g %d", sample.Value, sample.Timestamp)
	return sb.String()
}
// mustParseRelabelConfigs parses the given YAML relabeling config and panics
// on any parse error. For test code only.
func mustParseRelabelConfigs(config string) *promrelabel.ParsedConfigs {
	pcs, err := promrelabel.ParseRelabelConfigsData([]byte(config))
	if err == nil {
		return pcs
	}
	panic(fmt.Errorf("cannot parse %q: %w", config, err))
}