lib/promscrape: generate scrape_timeout_seconds metric per each scrape target in the same way as Prometheus 2.30 does

See https://github.com/prometheus/prometheus/pull/9247
This commit is contained in:
Aliaksandr Valialkin 2021-09-12 15:20:42 +03:00
parent 279f37c9e7
commit d90834da70
4 changed files with 43 additions and 14 deletions

View file

@@ -10,7 +10,8 @@ sort: 15
* FEATURE: vmagent: send stale markers for disappeared metrics like Prometheus does. Previously stale markers were sent only when the scrape target disappears or when it becomes temporarily unavailable. See [these docs](https://docs.victoriametrics.com/vmagent.html#prometheus-staleness-markers) for details.
* FEATURE: vmagent: add ability to set `series_limit` option for a particular scrape target via `__series_limit__` label. This allows setting the limit on the number of time series on a per-target basis. See [these docs](https://docs.victoriametrics.com/vmagent.html#cardinality-limiter) for details.
* FEATURE: vmagent: add ability to set `stream_parse` option for a particular scrape target via `__stream_parse__` label. This allows managing the stream parsing mode on a per-target basis. See [these docs](https://docs.victoriametrics.com/vmagent.html#stream-parsing-mode) for details.
* FEATURE: vmagent: add ability to set `scrape_interval` and `scrape_timeout` options for a particular target via `__scrape_interval__` and `__scrape_timeout__` labels in the same way as Prometheus does. See [this pull request](https://github.com/prometheus/prometheus/pull/8911).
* FEATURE: vmagent: add ability to set `scrape_interval` and `scrape_timeout` options for a particular target via `__scrape_interval__` and `__scrape_timeout__` labels in the same way as Prometheus 2.30 does. See [this pull request](https://github.com/prometheus/prometheus/pull/8911).
* FEATURE: vmagent: generate `scrape_timeout_seconds` metric per each scrape target, so the target saturation could be calculated with `scrape_duration_seconds / scrape_timeout_seconds`. See the corresponding [pull request from Prometheus 2.30](https://github.com/prometheus/prometheus/pull/9247).
* FEATURE: add new relabeling actions: `keep_metrics` and `drop_metrics`. This simplifies metrics filtering by metric names. See [these docs](https://docs.victoriametrics.com/vmagent.html#relabeling) for more details.
* FEATURE: allow splitting long `regex` in relabeling filters into an array of shorter regexps, which can be put into multiple lines for better readability and maintainability. See [these docs](https://docs.victoriametrics.com/vmagent.html#relabeling) for more details.
* FEATURE: vmagent: reduce CPU usage when calculating the number of newly added series per scrape (this number is sent to remote storage in `scrape_series_added` metric).

View file

@@ -155,7 +155,7 @@ func newClient(sw *ScrapeWork) *client {
}
func (c *client) GetStreamReader() (*streamReader, error) {
deadline := time.Now().Add(c.hc.ReadTimeout)
deadline := time.Now().Add(c.sc.Timeout)
ctx, cancel := context.WithDeadline(context.Background(), deadline)
req, err := http.NewRequestWithContext(ctx, "GET", c.scrapeURL, nil)
if err != nil {

View file

@@ -352,6 +352,7 @@ func (sw *scrapeWork) scrapeInternal(scrapeTimestamp, realTimestamp int64) error
sw.addAutoTimeseries(wc, "scrape_samples_scraped", float64(samplesScraped), scrapeTimestamp)
sw.addAutoTimeseries(wc, "scrape_samples_post_metric_relabeling", float64(samplesPostRelabeling), scrapeTimestamp)
sw.addAutoTimeseries(wc, "scrape_series_added", float64(seriesAdded), scrapeTimestamp)
sw.addAutoTimeseries(wc, "scrape_timeout_seconds", sw.Config.ScrapeTimeout.Seconds(), scrapeTimestamp)
sw.pushData(&wc.writeRequest)
sw.prevLabelsLen = len(wc.labels)
wc.reset()
@@ -428,6 +429,7 @@ func (sw *scrapeWork) scrapeStream(scrapeTimestamp, realTimestamp int64) error {
// scrape_series_added isn't calculated in streaming mode,
// since it may need unlimited amounts of memory when scraping targets with millions of exposed metrics.
sw.addAutoTimeseries(wc, "scrape_series_added", 0, scrapeTimestamp)
sw.addAutoTimeseries(wc, "scrape_timeout_seconds", sw.Config.ScrapeTimeout.Seconds(), scrapeTimestamp)
sw.pushData(&wc.writeRequest)
sw.prevLabelsLen = len(wc.labels)
wc.reset()

View file

@@ -4,6 +4,7 @@ import (
"fmt"
"strings"
"testing"
"time"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
@@ -44,11 +45,14 @@ func TestScrapeWorkScrapeInternalFailure(t *testing.T) {
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 0 123
scrape_series_added 0 123
scrape_timeout_seconds 42 123
`
timeseriesExpected := parseData(dataExpected)
var sw scrapeWork
sw.Config = &ScrapeWork{}
sw.Config = &ScrapeWork{
ScrapeTimeout: time.Second * 42,
}
readDataCalls := 0
sw.ReadData = func(dst []byte) ([]byte, error) {
@@ -133,17 +137,22 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
}
}
f(``, &ScrapeWork{}, `
f(``, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
}, `
up 1 123
scrape_samples_scraped 0 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 0 123
scrape_series_added 0 123
scrape_timeout_seconds 42 123
`)
f(`
foo{bar="baz",empty_label=""} 34.45 3
abc -2
`, &ScrapeWork{}, `
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
}, `
foo{bar="baz"} 34.45 123
abc -2 123
up 1 123
@@ -151,11 +160,13 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 2 123
scrape_series_added 2 123
scrape_timeout_seconds 42 123
`)
f(`
foo{bar="baz"} 34.45 3
abc -2
`, &ScrapeWork{
ScrapeTimeout: time.Second * 42,
HonorTimestamps: true,
Labels: []prompbmarshal.Label{
{
@@ -171,12 +182,14 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds{foo="x"} 0 123
scrape_samples_post_metric_relabeling{foo="x"} 2 123
scrape_series_added{foo="x"} 2 123
scrape_timeout_seconds{foo="x"} 42 123
`)
f(`
foo{job="orig",bar="baz"} 34.45
bar{y="2",job="aa",a="b",job="bb",x="1"} -3e4 2345
`, &ScrapeWork{
HonorLabels: false,
ScrapeTimeout: time.Second * 42,
HonorLabels: false,
Labels: []prompbmarshal.Label{
{
Name: "job",
@@ -191,13 +204,15 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds{job="override"} 0 123
scrape_samples_post_metric_relabeling{job="override"} 2 123
scrape_series_added{job="override"} 2 123
scrape_timeout_seconds{job="override"} 42 123
`)
// Empty instance override. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/453
f(`
no_instance{instance="",job="some_job",label="val1",test=""} 5555
test_with_instance{instance="some_instance",job="some_job",label="val2",test=""} 1555
`, &ScrapeWork{
HonorLabels: true,
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: []prompbmarshal.Label{
{
Name: "instance",
@@ -216,12 +231,14 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds{instance="foobar",job="xxx"} 0 123
scrape_samples_post_metric_relabeling{instance="foobar",job="xxx"} 2 123
scrape_series_added{instance="foobar",job="xxx"} 2 123
scrape_timeout_seconds{instance="foobar",job="xxx"} 42 123
`)
f(`
no_instance{instance="",job="some_job",label="val1",test=""} 5555
test_with_instance{instance="some_instance",job="some_job",label="val2",test=""} 1555
`, &ScrapeWork{
HonorLabels: false,
ScrapeTimeout: time.Second * 42,
HonorLabels: false,
Labels: []prompbmarshal.Label{
{
Name: "instance",
@@ -240,12 +257,14 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds{instance="foobar",job="xxx"} 0 123
scrape_samples_post_metric_relabeling{instance="foobar",job="xxx"} 2 123
scrape_series_added{instance="foobar",job="xxx"} 2 123
scrape_timeout_seconds{instance="foobar",job="xxx"} 42 123
`)
f(`
foo{job="orig",bar="baz"} 34.45
bar{job="aa",a="b",job="bb"} -3e4 2345
`, &ScrapeWork{
HonorLabels: true,
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: []prompbmarshal.Label{
{
Name: "job",
@@ -260,12 +279,14 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds{job="override"} 0 123
scrape_samples_post_metric_relabeling{job="override"} 2 123
scrape_series_added{job="override"} 2 123
scrape_timeout_seconds{job="override"} 42 123
`)
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
`, &ScrapeWork{
HonorLabels: true,
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: []prompbmarshal.Label{
{
Name: "job",
@@ -292,6 +313,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds{job="xx"} 0 123
scrape_samples_post_metric_relabeling{job="xx"} 2 123
scrape_series_added{job="xx"} 2 123
scrape_timeout_seconds{job="xx"} 42 123
`)
f(`
foo{bar="baz"} 34.44
@@ -299,7 +321,8 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
dropme{foo="bar"} 334
dropme{xxx="yy",ss="dsf"} 843
`, &ScrapeWork{
HonorLabels: true,
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
Labels: []prompbmarshal.Label{
{
Name: "job",
@@ -326,20 +349,23 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
scrape_duration_seconds{job="xx",instance="foo.com"} 0 123
scrape_samples_post_metric_relabeling{job="xx",instance="foo.com"} 1 123
scrape_series_added{job="xx",instance="foo.com"} 4 123
scrape_timeout_seconds{job="xx",instance="foo.com"} 42 123
`)
f(`
foo{bar="baz"} 34.44
bar{a="b",c="d"} -3e4
`, &ScrapeWork{
HonorLabels: true,
SampleLimit: 1,
SeriesLimit: 123,
ScrapeTimeout: time.Second * 42,
HonorLabels: true,
SampleLimit: 1,
SeriesLimit: 123,
}, `
up 0 123
scrape_samples_scraped 2 123
scrape_duration_seconds 0 123
scrape_samples_post_metric_relabeling 2 123
scrape_series_added 0 123
scrape_timeout_seconds 42 123
`)
}