lib/promscrape: add external_labels from global section of -promscrape.config after the relabeling is applied to the scraped metrics

This aligns with Prometheus behaviour. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3137
parent e5aa34b2e3, commit 735de9ee54
5 changed files with 112 additions and 39 deletions
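
At a high level, the change means metric relabeling now runs before external_labels are attached, so a relabel rule can no longer drop or rewrite them. A rough, self-contained Go sketch of the new ordering (the Label type and both helpers are illustrative stand-ins, not the vmagent implementation):

package main

import (
    "fmt"
    "sort"
)

// Label is a simplified stand-in for prompbmarshal.Label.
type Label struct {
    Name, Value string
}

// dropLabel mimics a relabeling rule that removes a label by name.
// Relabeling only ever sees the scraped labels now, so a rule like this
// can no longer strip an external label.
func dropLabel(labels []Label, name string) []Label {
    dst := labels[:0]
    for _, l := range labels {
        if l.Name != name {
            dst = append(dst, l)
        }
    }
    return dst
}

// appendExternalLabels adds the global external_labels after relabeling,
// in sorted order so the result is deterministic.
func appendExternalLabels(labels []Label, external map[string]string) []Label {
    names := make([]string, 0, len(external))
    for name := range external {
        names = append(names, name)
    }
    sort.Strings(names)
    for _, name := range names {
        labels = append(labels, Label{Name: name, Value: external[name]})
    }
    return labels
}

func main() {
    scraped := []Label{{Name: "__name__", Value: "foo"}, {Name: "instance", Value: "host:8428"}}
    external := map[string]string{"datacenter": "dc1"}

    labels := dropLabel(scraped, "datacenter") // relabeling runs first and cannot touch external_labels
    labels = appendExternalLabels(labels, external)
    fmt.Println(labels) // [{__name__ foo} {instance host:8428} {datacenter dc1}]
}

Previously the external labels were merged into the target labels up front (see the mergeLabels hunk below), so relabeling could modify them; now a collision can only happen at the very end and is resolved by appendExtraLabels.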
docs/CHANGELOG.md
@@ -46,6 +46,7 @@ See [these docs](https://docs.victoriametrics.com/Cluster-VictoriaMetrics.html#m
* FEATURE: [vmgateway](https://docs.victoriametrics.com/vmgateway.html): add ability to extract JWT authorization token from non-standard HTTP header by passing it via `-auth.httpHeader` command-line flag. See [this feature request](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3054).
* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): expose `__meta_ec2_region` label for [ec2_sd_config](https://docs.victoriametrics.com/sd_configs.html#ec2_sd_configs) in the same way as [Prometheus 2.39 does](https://github.com/prometheus/prometheus/pull/11326).
* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): accept data ingestion requests via paths starting from `/prometheus` prefix in the same way as [VictoriaMetrics does](https://docs.victoriametrics.com/#how-to-import-time-series-data). For example, `vmagent` now accepts Prometheus `remote_write` data via both `/api/v1/write` and `/prometheus/api/v1/write`. This simplifies switching between single-node VictoriaMetrics and `vmagent`.
+* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): add `external_labels` from `global` section at `-promscrape.config` after the [relabeling](https://docs.victoriametrics.com/vmagent.html#relabeling) is applied to scraped metrics. This aligns with Prometheus behaviour. Previously the `external_labels` were added to scrape targets, so they could be modified during relabeling. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3137).

* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): properly encode query params for aws signed requests, use `%20` instead of `+` as api requires. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3171).
* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): properly parse relabel config when regex ending with escaped `$`. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3131).
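
The external_labels themselves are declared under the `global` section of `-promscrape.config`, in the same place as in a Prometheus config. A minimal parsing sketch for illustration only (the struct is a stripped-down stand-in for the GlobalConfig shown in the next hunk, and the use of gopkg.in/yaml.v2 here is an assumption made for the example):

package main

import (
    "fmt"

    "gopkg.in/yaml.v2"
)

// config mirrors only the part of -promscrape.config relevant here;
// the real GlobalConfig in lib/promscrape has many more fields.
type config struct {
    Global struct {
        ExternalLabels map[string]string `yaml:"external_labels,omitempty"`
    } `yaml:"global"`
}

func main() {
    data := []byte(`
global:
  external_labels:
    datacenter: dc1
    tenant: team-a
`)
    var cfg config
    if err := yaml.Unmarshal(data, &cfg); err != nil {
        panic(err)
    }
    fmt.Println(cfg.Global.ExternalLabels) // map[datacenter:dc1 tenant:team-a]
}

Whatever ends up in that map is what getExternalLabels below turns into a sorted label slice.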
lib/promscrape/config.go
@@ -229,6 +229,22 @@ type GlobalConfig struct {
    ExternalLabels map[string]string `yaml:"external_labels,omitempty"`
}

+func (gc *GlobalConfig) getExternalLabels() []prompbmarshal.Label {
+    externalLabels := gc.ExternalLabels
+    if len(externalLabels) == 0 {
+        return nil
+    }
+    labels := make([]prompbmarshal.Label, 0, len(externalLabels))
+    for name, value := range externalLabels {
+        labels = append(labels, prompbmarshal.Label{
+            Name: name,
+            Value: value,
+        })
+    }
+    promrelabel.SortLabels(labels)
+    return labels
+}
+
// ScrapeConfig represents essential parts for `scrape_config` section of Prometheus config.
//
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config
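
A note on getExternalLabels above: Go randomizes map iteration order, which is presumably why the slice is passed through promrelabel.SortLabels before being returned; without sorting, the resulting ExternalLabels (and anything derived from them, such as the ScrapeWork key further below) could change from run to run. A standalone illustration of the same idea using only the standard library (local types, not the vmagent code):

package main

import (
    "fmt"
    "sort"
)

type label struct {
    Name, Value string
}

// labelsFromMap converts a map of external labels into a slice with a stable order.
func labelsFromMap(m map[string]string) []label {
    labels := make([]label, 0, len(m))
    for name, value := range m {
        // Map iteration order is randomized by the Go runtime,
        // so the slice must be sorted to get a deterministic result.
        labels = append(labels, label{Name: name, Value: value})
    }
    sort.Slice(labels, func(i, j int) bool { return labels[i].Name < labels[j].Name })
    return labels
}

func main() {
    m := map[string]string{"tenant": "team-a", "datacenter": "dc1", "replica": "0"}
    fmt.Println(labelsFromMap(m)) // [{datacenter dc1} {replica 0} {tenant team-a}]
}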
@@ -933,6 +949,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
    if (*streamParse || sc.StreamParse) && sc.SeriesLimit > 0 {
        return nil, fmt.Errorf("cannot use stream parsing mode when `series_limit` is set for `job_name` %q", jobName)
    }
+   externalLabels := globalCfg.getExternalLabels()
    swc := &scrapeWorkConfig{
        scrapeInterval: scrapeInterval,
        scrapeIntervalString: scrapeInterval.String(),
@@ -948,7 +965,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
        honorLabels: honorLabels,
        honorTimestamps: honorTimestamps,
        denyRedirects: denyRedirects,
-       externalLabels: globalCfg.ExternalLabels,
+       externalLabels: externalLabels,
        relabelConfigs: relabelConfigs,
        metricRelabelConfigs: metricRelabelConfigs,
        sampleLimit: sc.SampleLimit,
@@ -977,7 +994,7 @@ type scrapeWorkConfig struct {
    honorLabels bool
    honorTimestamps bool
    denyRedirects bool
-   externalLabels map[string]string
+   externalLabels []prompbmarshal.Label
    relabelConfigs *promrelabel.ParsedConfigs
    metricRelabelConfigs *promrelabel.ParsedConfigs
    sampleLimit int
@@ -1308,6 +1325,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
        DenyRedirects: swc.denyRedirects,
        OriginalLabels: originalLabels,
        Labels: labels,
+       ExternalLabels: swc.externalLabels,
        ProxyURL: swc.proxyURL,
        ProxyAuthConfig: swc.proxyAuthConfig,
        AuthConfig: swc.authConfig,
@@ -1357,9 +1375,6 @@ func mergeLabels(dst []prompbmarshal.Label, swc *scrapeWorkConfig, target string
    logger.Panicf("BUG: len(dst) must be 0; got %d", len(dst))
    }
    // See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
-   for k, v := range swc.externalLabels {
-       dst = appendLabel(dst, k, v)
-   }
    dst = appendLabel(dst, "job", swc.jobName)
    dst = appendLabel(dst, "__address__", target)
    dst = appendLabel(dst, "__scheme__", swc.scheme)

lib/promscrape/config_test.go
@@ -36,19 +36,11 @@ func TestMergeLabels(t *testing.T) {
        metricsPath: "/foo/bar",
        scrapeIntervalString: "15s",
        scrapeTimeoutString: "10s",
-       externalLabels: map[string]string{
-           "job": "bar",
-           "a": "b",
-       },
-   }, "foo", nil, nil, `{__address__="foo",__metrics_path__="/foo/bar",__scheme__="https",__scrape_interval__="15s",__scrape_timeout__="10s",a="b",job="xyz"}`)
+   }, "foo", nil, nil, `{__address__="foo",__metrics_path__="/foo/bar",__scheme__="https",__scrape_interval__="15s",__scrape_timeout__="10s",job="xyz"}`)
    f(&scrapeWorkConfig{
        jobName: "xyz",
        scheme: "https",
        metricsPath: "/foo/bar",
-       externalLabels: map[string]string{
-           "job": "bar",
-           "a": "b",
-       },
    }, "foo", map[string]string{
        "job": "extra_job",
        "foo": "extra_foo",
@@ -959,10 +951,6 @@ scrape_configs:
            Name: "__scrape_timeout__",
            Value: "10s",
        },
-       {
-           Name: "datacenter",
-           Value: "foobar",
-       },
        {
            Name: "instance",
            Value: "foo.bar:1234",
@@ -971,6 +959,12 @@ scrape_configs:
            Name: "job",
            Value: "foo",
        },
    },
+   ExternalLabels: []prompbmarshal.Label{
+       {
+           Name: "datacenter",
+           Value: "foobar",
+       },
+       {
+           Name: "jobs",
+           Value: "xxx",
@@ -1604,6 +1598,24 @@ scrape_configs:
            Name: "job",
            Value: "yyy",
        },
    },
+   ExternalLabels: []prompbmarshal.Label{
+       {
+           Name: "__address__",
+           Value: "aaasdf",
+       },
+       {
+           Name: "__param_a",
+           Value: "jlfd",
+       },
+       {
+           Name: "foo",
+           Value: "xx",
+       },
+       {
+           Name: "job",
+           Value: "foobar",
+       },
+       {
+           Name: "q",
+           Value: "qwe",

lib/promscrape/scrapework.go
@@ -87,6 +87,12 @@ type ScrapeWork struct {
    // See also https://prometheus.io/docs/concepts/jobs_instances/
    Labels []prompbmarshal.Label

+   // ExternalLabels contains labels from global->external_labels section of -promscrape.config
+   //
+   // These labels are added to scraped metrics after the relabeling.
+   // See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3137
+   ExternalLabels []prompbmarshal.Label
+
    // ProxyURL HTTP proxy url
    ProxyURL *proxy.URL
@@ -140,9 +146,11 @@ func (sw *ScrapeWork) key() string {
    // Do not take into account OriginalLabels, since they can be changed with relabeling.
    // Take into account JobNameOriginal in order to capture the case when the original job_name is changed via relabeling.
    key := fmt.Sprintf("JobNameOriginal=%s, ScrapeURL=%s, ScrapeInterval=%s, ScrapeTimeout=%s, HonorLabels=%v, HonorTimestamps=%v, DenyRedirects=%v, Labels=%s, "+
+       "ExternalLabels=%s, "+
        "ProxyURL=%s, ProxyAuthConfig=%s, AuthConfig=%s, MetricRelabelConfigs=%s, SampleLimit=%d, DisableCompression=%v, DisableKeepAlive=%v, StreamParse=%v, "+
        "ScrapeAlignInterval=%s, ScrapeOffset=%s, SeriesLimit=%d",
        sw.jobNameOriginal, sw.ScrapeURL, sw.ScrapeInterval, sw.ScrapeTimeout, sw.HonorLabels, sw.HonorTimestamps, sw.DenyRedirects, sw.LabelsString(),
+       promLabelsString(sw.ExternalLabels),
        sw.ProxyURL.String(), sw.ProxyAuthConfig.String(),
        sw.AuthConfig.String(), sw.MetricRelabelConfigs.String(), sw.SampleLimit, sw.DisableCompression, sw.DisableKeepAlive, sw.StreamParse,
        sw.ScrapeAlignInterval, sw.ScrapeOffset, sw.SeriesLimit)
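
Including ExternalLabels in key() means a change to external_labels alone is enough to make the fingerprint differ, so a config reload can notice that the scrape work needs to be re-created. A toy sketch of that fingerprinting idea (miniWork and its fields are made up for the example; the real key() above covers many more fields):

package main

import "fmt"

// miniWork is an illustrative stand-in for ScrapeWork: every field that affects
// scraping must take part in the key, otherwise a reload that changes only that
// field would go unnoticed.
type miniWork struct {
    ScrapeURL      string
    ExternalLabels map[string]string
}

func (w *miniWork) key() string {
    // fmt prints maps with sorted keys, so the fingerprint is deterministic.
    return fmt.Sprintf("ScrapeURL=%s, ExternalLabels=%v", w.ScrapeURL, w.ExternalLabels)
}

func main() {
    a := &miniWork{ScrapeURL: "http://host:8428/metrics", ExternalLabels: map[string]string{"dc": "dc1"}}
    b := &miniWork{ScrapeURL: "http://host:8428/metrics", ExternalLabels: map[string]string{"dc": "dc2"}}
    fmt.Println(a.key() == b.key()) // false: changing external_labels changes the key
}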
@@ -835,6 +843,9 @@ func (sw *scrapeWork) addRowToTimeseries(wc *writeRequestCtx, r *parser.Row, tim
        // Skip row without labels.
        return
    }
+   // Add labels from `global->external_labels` section after the relabeling like Prometheus does.
+   // See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3137
+   wc.labels = appendExtraLabels(wc.labels, sw.Config.ExternalLabels, labelsLen, sw.Config.HonorLabels)
    sampleTimestamp := r.Timestamp
    if !sw.Config.HonorTimestamps || sampleTimestamp == 0 {
        sampleTimestamp = timestamp
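
When an external label collides with a label that survived relabeling, the outcome depends on honor_labels, as exercised by TestAppendExtraLabels further below: with honor_labels: true the scraped value wins and the external label is dropped, while with honor_labels: false the scraped label is renamed with an exported_ prefix and the external value is attached. A rough, self-contained sketch of just that rule (simplified; the real logic, including handling of pre-existing exported_* labels, is appendExtraLabels in the next hunk):

package main

import "fmt"

// Label is a simplified stand-in for prompbmarshal.Label.
type Label struct {
    Name, Value string
}

// addExternal applies the collision rule for a single external label.
// It is deliberately simplified: the real appendExtraLabels also handles
// the __name__ label and already-present exported_* labels.
func addExternal(labels []Label, ext Label, honorLabels bool) []Label {
    for i := range labels {
        if labels[i].Name != ext.Name {
            continue
        }
        if honorLabels {
            // Keep the scraped value, drop the external one.
            return labels
        }
        // Rename the scraped label and attach the external one.
        labels[i].Name = "exported_" + labels[i].Name
        return append(labels, ext)
    }
    return append(labels, ext)
}

func main() {
    scraped := []Label{{Name: "a", Value: "b"}}
    fmt.Println(addExternal(append([]Label(nil), scraped...), Label{Name: "a", Value: "d"}, true))  // [{a b}]
    fmt.Println(addExternal(append([]Label(nil), scraped...), Label{Name: "a", Value: "d"}, false)) // [{exported_a b} {a d}]
}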
@@ -863,36 +874,43 @@ func appendLabels(dst []prompbmarshal.Label, metric string, src []parser.Tag, ex
Value: tag.Value,
})
}
dst = append(dst, extraLabels...)
labels := dst[dstLen:]
if len(labels) <= 1 {
// Fast path - only a single label.
return appendExtraLabels(dst, extraLabels, dstLen, honorLabels)
}

func appendExtraLabels(dst, extraLabels []prompbmarshal.Label, offset int, honorLabels bool) []prompbmarshal.Label {
// Add extraLabels to labels.
// Handle duplicates in the same way as Prometheus does.
if len(dst) > offset && dst[offset].Name == "__name__" {
offset++
}
labels := dst[offset:]
if len(labels) == 0 {
// Fast path - add extraLabels to dst without the need to de-duplicate.
dst = append(dst, extraLabels...)
return dst
}

// de-duplicate labels
dstLabels := labels[:0]
for i := range labels {
label := &labels[i]
prevLabel := promrelabel.GetLabelByName(dstLabels, label.Name)
for _, label := range extraLabels {
prevLabel := promrelabel.GetLabelByName(labels, label.Name)
if prevLabel == nil {
dstLabels = append(dstLabels, *label)
// Fast path - the label doesn't exist in labels, so just add it to dst.
dst = append(dst, label)
continue
}
if honorLabels {
// Skip the extra label with the same name.
continue
}
// Rename the prevLabel to "exported_" + label.Name.
// Rename the prevLabel to "exported_" + label.Name
// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config
exportedName := "exported_" + label.Name
if promrelabel.GetLabelByName(dstLabels, exportedName) != nil {
// Override duplicate with the current label.
*prevLabel = *label
continue
exportedLabel := promrelabel.GetLabelByName(labels, exportedName)
if exportedLabel == nil {
prevLabel.Name = exportedName
dst = append(dst, label)
} else {
exportedLabel.Value = prevLabel.Value
prevLabel.Value = label.Value
}
prevLabel.Name = exportedName
dstLabels = append(dstLabels, *label)
}
return dst[:dstLen+len(dstLabels)]
return dst
}

lib/promscrape/scrapework_test.go
@@ -12,6 +12,33 @@ import (
    parser "github.com/VictoriaMetrics/VictoriaMetrics/lib/protoparser/prometheus"
)

+func TestAppendExtraLabels(t *testing.T) {
+   f := func(sourceLabels, extraLabels string, honorLabels bool, resultExpected string) {
+       t.Helper()
+       src := promrelabel.MustParseMetricWithLabels(sourceLabels)
+       extra := promrelabel.MustParseMetricWithLabels(extraLabels)
+       labels := appendExtraLabels(src, extra, 0, honorLabels)
+       result := promLabelsString(labels)
+       if result != resultExpected {
+           t.Fatalf("unexpected result; got\n%s\nwant\n%s", result, resultExpected)
+       }
+   }
+   f("{}", "{}", true, "{}")
+   f("{}", "{}", false, "{}")
+   f("foo", "{}", true, `{__name__="foo"}`)
+   f("foo", "{}", false, `{__name__="foo"}`)
+   f("foo", "bar", true, `{__name__="foo",__name__="bar"}`)
+   f("foo", "bar", false, `{__name__="foo",__name__="bar"}`)
+   f(`{a="b"}`, `{c="d"}`, true, `{a="b",c="d"}`)
+   f(`{a="b"}`, `{c="d"}`, false, `{a="b",c="d"}`)
+   f(`{a="b"}`, `{a="d"}`, true, `{a="b"}`)
+   f(`{a="b"}`, `{a="d"}`, false, `{exported_a="b",a="d"}`)
+   f(`{a="b",exported_a="x"}`, `{a="d"}`, true, `{a="b",exported_a="x"}`)
+   f(`{a="b",exported_a="x"}`, `{a="d"}`, false, `{a="d",exported_a="b"}`)
+   f(`{a="b"}`, `{a="d",exported_a="x"}`, true, `{a="b",exported_a="x"}`)
+   f(`{a="b"}`, `{a="d",exported_a="x"}`, false, `{exported_a="b",a="d",exported_a="x"}`)
+}
+
func TestPromLabelsString(t *testing.T) {
    f := func(labels []prompbmarshal.Label, resultExpected string) {
        t.Helper()
@@ -187,7 +214,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
    `)
    f(`
        foo{job="orig",bar="baz"} 34.45
-       bar{y="2",job="aa",a="b",job="bb",x="1"} -3e4 2345
+       bar{y="2",job="aa",a="b",x="1"} -3e4 2345
    `, &ScrapeWork{
        ScrapeTimeout: time.Second * 42,
        HonorLabels: false,
@@ -262,7 +289,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
    `)
    f(`
        foo{job="orig",bar="baz"} 34.45
-       bar{job="aa",a="b",job="bb"} -3e4 2345
+       bar{job="aa",a="b"} -3e4 2345
    `, &ScrapeWork{
        ScrapeTimeout: time.Second * 42,
        HonorLabels: true,