lib/promscrape: add ability to configure scrape_timeout and scrape_interval via relabeling

See https://github.com/prometheus/prometheus/pull/8911
Aliaksandr Valialkin 2021-09-12 13:33:39 +03:00
parent 09670479cd
commit 6c97388dde
4 changed files with 182 additions and 15 deletions
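
In short: after this change, a relabeling rule may set the special `__scrape_interval__` and `__scrape_timeout__` labels to override the job-level settings for individual targets, mirroring the Prometheus pull request linked above. A minimal sketch (the job name, targets and the `15s` value are illustrative; values must be parseable by Go's `time.ParseDuration`, e.g. `30s` or `1m30s`):

```yaml
scrape_configs:
  - job_name: example
    scrape_interval: 1m
    static_configs:
      - targets: ["host1:8080", "host2:8080"]
    relabel_configs:
      # Scrape host2 more frequently than the job-level 1m interval.
      - source_labels: [__address__]
        regex: "host2.*"
        target_label: __scrape_interval__
        replacement: "15s"
```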

docs/CHANGELOG.md

@@ -10,6 +10,7 @@ sort: 15
 * FEATURE: vmagent: send stale markers for disappeared metrics like Prometheus does. Previously stale markers were sent only when the scrape target disappears or when it becomes temporarily unavailable. See [these docs](https://docs.victoriametrics.com/vmagent.html#prometheus-staleness-markers) for details.
 * FEATURE: vmagent: add ability to set `series_limit` option for a particular scrape target via `__series_limit__` label. This allows setting the limit on the number of time series on a per-target basis. See [these docs](https://docs.victoriametrics.com/vmagent.html#cardinality-limiter) for details.
 * FEATURE: vmagent: add ability to set `stream_parse` option for a particular scrape target via `__stream_parse__` label. This allows managing the stream parsing mode on a per-target basis. See [these docs](https://docs.victoriametrics.com/vmagent.html#stream-parsing-mode) for details.
+* FEATURE: vmagent: add ability to set `scrape_interval` and `scrape_timeout` options for a particular target via `__scrape_interval__` and `__scrape_timeout__` labels in the same way as Prometheus does. See [this pull request](https://github.com/prometheus/prometheus/pull/8911).
 * FEATURE: add new relabeling actions: `keep_metrics` and `drop_metrics`. This simplifies metrics filtering by metric names. See [these docs](https://docs.victoriametrics.com/vmagent.html#relabeling) for more details.
 * FEATURE: allow splitting long `regex` in relabeling filters into an array of shorter regexps, which can be put into multiple lines for better readability and maintainability. See [these docs](https://docs.victoriametrics.com/vmagent.html#relabeling) for more details.
 * FEATURE: vmagent: reduce CPU usage when calculating the number of newly added series per scrape (this number is sent to remote storage in `scrape_series_added` metric).
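
The new CHANGELOG entry above corresponds to the configuration shape exercised in the tests below: the special labels may also be attached to targets directly under `static_configs`. A minimal sketch (job name and targets taken from the test config):

```yaml
scrape_configs:
  - job_name: foo
    scrape_timeout: 12s
    static_configs:
      - targets: ["foo.bar", "aaa"]
        labels:
          # Overrides the job-level 12s timeout for these two targets.
          __scrape_timeout__: "5s"
```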

lib/promscrape/config.go

@@ -986,7 +986,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
         }
     }
-    labels := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, extraLabels, swc.externalLabels, metaLabels, swc.params)
+    labels := mergeLabels(swc, target, extraLabels, metaLabels)
     var originalLabels []prompbmarshal.Label
     if !*dropOriginalLabels {
         originalLabels = append([]prompbmarshal.Label{}, labels...)
@@ -1049,6 +1049,23 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
         })
         promrelabel.SortLabels(labels)
     }
+    // Read __scrape_interval__ and __scrape_timeout__ from labels.
+    scrapeInterval := swc.scrapeInterval
+    if s := promrelabel.GetLabelValueByName(labels, "__scrape_interval__"); len(s) > 0 {
+        d, err := time.ParseDuration(s)
+        if err != nil {
+            return nil, fmt.Errorf("cannot parse __scrape_interval__=%q: %w", s, err)
+        }
+        scrapeInterval = d
+    }
+    scrapeTimeout := swc.scrapeTimeout
+    if s := promrelabel.GetLabelValueByName(labels, "__scrape_timeout__"); len(s) > 0 {
+        d, err := time.ParseDuration(s)
+        if err != nil {
+            return nil, fmt.Errorf("cannot parse __scrape_timeout__=%q: %w", s, err)
+        }
+        scrapeTimeout = d
+    }
     // Read series_limit option from __series_limit__ label.
     // See https://docs.victoriametrics.com/vmagent.html#cardinality-limiter
     seriesLimit := swc.seriesLimit
@@ -1073,8 +1090,8 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
     internLabelStrings(labels)
     sw := &ScrapeWork{
         ScrapeURL: scrapeURL,
-        ScrapeInterval: swc.scrapeInterval,
-        ScrapeTimeout: swc.scrapeTimeout,
+        ScrapeInterval: scrapeInterval,
+        ScrapeTimeout: scrapeTimeout,
         HonorLabels: swc.honorLabels,
         HonorTimestamps: swc.honorTimestamps,
         DenyRedirects: swc.denyRedirects,
@@ -1144,17 +1161,19 @@ func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]s
     return m
 }
 
-func mergeLabels(job, scheme, target, metricsPath string, extraLabels, externalLabels, metaLabels map[string]string, params map[string][]string) []prompbmarshal.Label {
+func mergeLabels(swc *scrapeWorkConfig, target string, extraLabels, metaLabels map[string]string) []prompbmarshal.Label {
     // See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
-    m := make(map[string]string, 4+len(externalLabels)+len(params)+len(extraLabels)+len(metaLabels))
-    for k, v := range externalLabels {
+    m := make(map[string]string, 4+len(swc.externalLabels)+len(swc.params)+len(extraLabels)+len(metaLabels))
+    for k, v := range swc.externalLabels {
         m[k] = v
     }
-    m["job"] = job
+    m["job"] = swc.jobName
     m["__address__"] = target
-    m["__scheme__"] = scheme
-    m["__metrics_path__"] = metricsPath
-    for k, args := range params {
+    m["__scheme__"] = swc.scheme
+    m["__metrics_path__"] = swc.metricsPath
+    m["__scrape_interval__"] = swc.scrapeInterval.String()
+    m["__scrape_timeout__"] = swc.scrapeTimeout.String()
+    for k, args := range swc.params {
         if len(args) == 0 {
             continue
         }
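
Because mergeLabels now materializes the effective defaults as `__scrape_interval__` and `__scrape_timeout__` labels, relabeling rules can read these values, not just write them. A hedged sketch (the `scrape_interval` target label name is arbitrary):

```yaml
relabel_configs:
  # Copy the effective per-target interval into an ordinary label so it
  # survives relabeling (labels starting with __ are dropped afterwards).
  - source_labels: [__scrape_interval__]
    target_label: scrape_interval
```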

lib/promscrape/config_test.go

@@ -152,6 +152,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "instance",
         Value: "8.8.8.8",
@@ -581,6 +589,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "__vm_filepath",
         Value: "",
@@ -621,6 +637,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "__vm_filepath",
         Value: "",
@@ -661,6 +685,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "__vm_filepath",
         Value: "",
@@ -723,6 +755,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "instance",
         Value: "foo.bar:1234",
@@ -766,6 +806,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "datacenter",
         Value: "foobar",
@@ -794,7 +842,7 @@ global:
   scrape_timeout: 34s
 scrape_configs:
 - job_name: foo
-  scrape_interval: 543s
+  scrape_interval: 54s
   scrape_timeout: 12s
   metrics_path: /foo/bar
   scheme: https
@@ -809,6 +857,7 @@ scrape_configs:
   - targets: ["foo.bar", "aaa"]
     labels:
       x: y
+      __scrape_timeout__: "5s"
 - job_name: qwer
   tls_config:
     server_name: foobar
@@ -821,8 +870,8 @@ scrape_configs:
 `, []*ScrapeWork{
     {
         ScrapeURL: "https://foo.bar:443/foo/bar?p=x%26y&p=%3D",
-        ScrapeInterval: 543 * time.Second,
-        ScrapeTimeout: 12 * time.Second,
+        ScrapeInterval: 54 * time.Second,
+        ScrapeTimeout: 5 * time.Second,
         HonorLabels: true,
         HonorTimestamps: true,
         DenyRedirects: true,
@@ -843,6 +892,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "https",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "54s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "5s",
+    },
     {
         Name: "instance",
         Value: "foo.bar:443",
@@ -863,8 +920,8 @@ scrape_configs:
     },
     {
        ScrapeURL: "https://aaa:443/foo/bar?p=x%26y&p=%3D",
-        ScrapeInterval: 543 * time.Second,
-        ScrapeTimeout: 12 * time.Second,
+        ScrapeInterval: 54 * time.Second,
+        ScrapeTimeout: 5 * time.Second,
         HonorLabels: true,
         HonorTimestamps: true,
         DenyRedirects: true,
@@ -885,6 +942,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "https",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "54s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "5s",
+    },
     {
         Name: "instance",
         Value: "aaa:443",
@@ -920,6 +985,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "8s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "8s",
+    },
     {
         Name: "instance",
         Value: "1.2.3.4:80",
@@ -953,6 +1026,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "8s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "8s",
+    },
     {
         Name: "instance",
         Value: "foobar:80",
@@ -1024,6 +1105,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "hash",
         Value: "82",
@@ -1095,6 +1184,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "mailto",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "instance",
         Value: "fake.addr",
@@ -1180,6 +1277,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "instance",
         Value: "foo.bar:1234",
@@ -1221,6 +1326,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "instance",
         Value: "foo.bar:1234",
@@ -1258,6 +1371,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "instance",
         Value: "foo.bar:1234",
@@ -1313,6 +1434,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "foo",
         Value: "bar",
@@ -1386,6 +1515,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "__series_limit__",
         Value: "1234",
@@ -1441,6 +1578,14 @@ scrape_configs:
         Name: "__scheme__",
         Value: "http",
     },
+    {
+        Name: "__scrape_interval__",
+        Value: "1m0s",
+    },
+    {
+        Name: "__scrape_timeout__",
+        Value: "10s",
+    },
     {
         Name: "instance",
         Value: "foo.bar:1234",

lib/promscrape/scrapework.go

@@ -69,6 +69,8 @@ type ScrapeWork struct {
     // * __address__
     // * __scheme__
     // * __metrics_path__
+    // * __scrape_interval__
+    // * __scrape_timeout__
     // * __param_<name>
     // * __meta_*
     // * user-defined labels set via `relabel_configs` section in `scrape_config`
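
Since these labels now sit alongside the `__meta_*` labels during relabeling, discovery-driven overrides become possible. A hedged sketch assuming Kubernetes pod discovery and a hypothetical `example.com/scrape-interval` annotation:

```yaml
relabel_configs:
  # If the pod carries the (hypothetical) example.com/scrape-interval
  # annotation, use its value as that pod's scrape interval.
  - source_labels: [__meta_kubernetes_pod_annotation_example_com_scrape_interval]
    regex: ".+"
    target_label: __scrape_interval__
```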