lib/promscrape: use local scrape timestamp for scraped metrics unless honor_timestamps: true is set explicitly

This fixes gaps for metrics collected from cadvisor,
which exports invalid timestamps that break staleness detection on the VictoriaMetrics side.

See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697 ,
https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1654614799
and https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1656540535

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1773
Aliaksandr Valialkin 2023-07-28 21:08:41 -07:00
parent eb8ee5aab6
commit 9c3f87fac2
4 changed files with 75 additions and 100 deletions
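
In effect, the scraper now stamps samples with its own scrape time unless the user opts in to target-provided timestamps. The following minimal Go sketch illustrates the resulting per-sample decision; the `sample` type and `adjustTimestamps` helper are hypothetical illustrations, not the actual lib/promscrape code:

```go
package main

import (
	"fmt"
	"time"
)

// sample is a hypothetical scraped sample; timestamp is in milliseconds,
// with 0 meaning "no timestamp exported by the target".
type sample struct {
	name      string
	value     float64
	timestamp int64
}

// adjustTimestamps keeps target-provided timestamps only when
// honorTimestamps is explicitly enabled; otherwise it overwrites them
// with the local scrape timestamp, so staleness detection keeps working
// even if the target (e.g. cadvisor) exports invalid timestamps.
func adjustTimestamps(samples []sample, honorTimestamps bool, scrapeTimestamp int64) {
	for i := range samples {
		if honorTimestamps && samples[i].timestamp != 0 {
			continue
		}
		samples[i].timestamp = scrapeTimestamp
	}
}

func main() {
	scrapeTimestamp := time.Now().UnixMilli()
	samples := []sample{
		// cadvisor-style sample with a timestamp lagging 90 seconds behind.
		{name: "container_cpu_usage_seconds_total", value: 12.5, timestamp: scrapeTimestamp - 90_000},
	}
	adjustTimestamps(samples, false, scrapeTimestamp) // honor_timestamps now defaults to false
	fmt.Println(samples[0].timestamp == scrapeTimestamp) // prints: true
}
```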

docs/CHANGELOG.md

@@ -23,6 +23,7 @@ The following tip changes can be tested by building VictoriaMetrics components f
 * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): close HTTP connections to [service discovery](https://docs.victoriametrics.com/sd_configs.html) servers when they are no longer needed. This should prevent possible connection exhaustion in some cases. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4724).
 * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): properly apply `if` filters during [relabeling](https://docs.victoriametrics.com/vmagent.html#relabeling-enhancements). Previously the `if` filter could work improperly. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4806) and [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4816).
 * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): fix possible panic at shutdown when [stream aggregation](https://docs.victoriametrics.com/stream-aggregation.html) is enabled. See [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4407) for details.
+* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): use local scrape timestamps for the scraped metrics unless the `honor_timestamps: true` option is explicitly set at [scrape_config](https://docs.victoriametrics.com/sd_configs.html#scrape_configs). This fixes gaps for metrics collected from [cadvisor](https://github.com/google/cadvisor) or similar exporters, which export metrics with invalid timestamps. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697) and [this comment](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1654614799) for details.
 * BUGFIX: [vmauth](https://docs.victoriametrics.com/vmauth.html): properly handle the LOCAL command for proxy protocol. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3335#issuecomment-1569864108).
 * BUGFIX: [VictoriaMetrics cluster](https://docs.victoriametrics.com/Cluster-VictoriaMetrics.html): properly return an error from [/api/v1/query](https://docs.victoriametrics.com/keyConcepts.html#instant-query) and [/api/v1/query_range](https://docs.victoriametrics.com/keyConcepts.html#range-query) at `vmselect` when the `-search.maxSamplesPerQuery` or `-search.maxSamplesPerSeries` [limit](https://docs.victoriametrics.com/Cluster-VictoriaMetrics.html#resource-usage-limits) is exceeded. Previously an incomplete response could be returned without the error if `vmselect` runs with `-replicationFactor` greater than 1. See [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4472).
 * BUGFIX: [vmalert](https://docs.victoriametrics.com/vmalert.html): correctly calculate the evaluation time for rules. Previously there was a low probability of a discrepancy between the actual time and the rule evaluation time if the evaluation interval was lower than the execution time for rules within the group.

docs/sd_configs.md

@@ -1224,7 +1224,8 @@ scrape_configs:
 # If honor_timestamps is set to "false", the timestamps of the metrics exposed
 # by the target will be ignored.
 #
-# By default honor_timestamps is set to true.
+# By default, honor_timestamps is set to false.
+# See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1656540535 for details.
 # honor_timestamps: <boolean>
 # scheme configures the protocol scheme used for requests.
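
Since the option now defaults to false (see the config.go hunk below, where the field becomes a plain bool), a job must opt in explicitly to keep target-provided timestamps. A small sketch of how the zero value plays out when the config is decoded, assuming `gopkg.in/yaml.v2` and a hypothetical mirror of the real struct:

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v2"
)

// scrapeConfig is a hypothetical mirror of the relevant ScrapeConfig fields.
// With a plain bool field, the Go zero value (false) is the default.
type scrapeConfig struct {
	JobName         string `yaml:"job_name"`
	HonorTimestamps bool   `yaml:"honor_timestamps,omitempty"`
}

func main() {
	configs := []string{
		"job_name: cadvisor\n",                     // option omitted -> false
		"job_name: node\nhonor_timestamps: true\n", // explicit opt-in
	}
	for _, data := range configs {
		var sc scrapeConfig
		if err := yaml.Unmarshal([]byte(data), &sc); err != nil {
			log.Fatalf("cannot parse config: %s", err)
		}
		fmt.Printf("%s: honor_timestamps=%v\n", sc.JobName, sc.HonorTimestamps)
	}
}
```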

lib/promscrape/config.go

@@ -242,7 +242,7 @@ type ScrapeConfig struct {
     ScrapeTimeout   *promutils.Duration `yaml:"scrape_timeout,omitempty"`
     MetricsPath     string              `yaml:"metrics_path,omitempty"`
     HonorLabels     bool                `yaml:"honor_labels,omitempty"`
-    HonorTimestamps *bool               `yaml:"honor_timestamps,omitempty"`
+    HonorTimestamps bool                `yaml:"honor_timestamps,omitempty"`
     FollowRedirects *bool               `yaml:"follow_redirects,omitempty"`
     Scheme          string              `yaml:"scheme,omitempty"`
     Params          map[string][]string `yaml:"params,omitempty"`
@@ -921,10 +921,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         scrapeTimeout = scrapeInterval
     }
     honorLabels := sc.HonorLabels
-    honorTimestamps := true
-    if sc.HonorTimestamps != nil {
-        honorTimestamps = *sc.HonorTimestamps
-    }
+    honorTimestamps := sc.HonorTimestamps
     denyRedirects := false
     if sc.FollowRedirects != nil {
         denyRedirects = !*sc.FollowRedirects
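
The `*bool` to `bool` switch above is what carries the default change: a pointer field distinguishes "option omitted" (nil) from "explicitly false", which let the old code fall back to true, while a plain bool makes the zero value false the implicit default. A standalone illustration of the two resolution strategies (a sketch only, not the actual code):

```go
package main

import "fmt"

// oldHonorTimestamps mimics the removed code path: a nil pointer meant
// "option omitted", so the default used to be true.
func oldHonorTimestamps(v *bool) bool {
	if v == nil {
		return true
	}
	return *v
}

// newHonorTimestamps mimics the new code path: the zero value false is
// the default, and only an explicit `honor_timestamps: true` enables it.
func newHonorTimestamps(v bool) bool {
	return v
}

func main() {
	fmt.Println(oldHonorTimestamps(nil))   // true: old default
	fmt.Println(newHonorTimestamps(false)) // false: new default
}
```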

lib/promscrape/config_test.go

@@ -89,7 +89,7 @@ scrape_configs:
 scrape_configs:
 - job_name: foo
   honor_labels: true
-  honor_timestamps: false
+  honor_timestamps: true
   scheme: https
   params:
     foo:
@@ -243,10 +243,9 @@ scrape_configs:
     resetNonEssentialFields(sws)
     swsExpected := []*ScrapeWork{
         {
-            ScrapeURL:       "http://host1:80/metric/path1?x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host1:80/metric/path1?x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host1:80",
                 "job":      "abc",
@@ -256,10 +255,9 @@ scrape_configs:
             jobNameOriginal: "abc",
         },
         {
-            ScrapeURL:       "https://host2:443/metric/path2?x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "https://host2:443/metric/path2?x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host2:443",
                 "job":      "abc",
@@ -269,10 +267,9 @@ scrape_configs:
             jobNameOriginal: "abc",
         },
         {
-            ScrapeURL:       "http://host3:1234/metric/path3?arg1=value1&x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host3:1234/metric/path3?arg1=value1&x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host3:1234",
                 "job":      "abc",
@@ -282,10 +279,9 @@ scrape_configs:
             jobNameOriginal: "abc",
         },
         {
-            ScrapeURL:       "https://host4:1234/foo/bar?x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "https://host4:1234/foo/bar?x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host4:1234",
                 "job":      "abc",
@@ -330,10 +326,9 @@ scrape_configs:
     sws := cfg.getStaticScrapeWork()
     resetNonEssentialFields(sws)
     swsExpected := []*ScrapeWork{{
-        ScrapeURL:       "http://black:9115/probe?module=dns_udp_example&target=8.8.8.8",
-        ScrapeInterval:  defaultScrapeInterval,
-        ScrapeTimeout:   defaultScrapeTimeout,
-        HonorTimestamps: true,
+        ScrapeURL:      "http://black:9115/probe?module=dns_udp_example&target=8.8.8.8",
+        ScrapeInterval: defaultScrapeInterval,
+        ScrapeTimeout:  defaultScrapeTimeout,
         Labels: promutils.NewLabelsFromMap(map[string]string{
             "instance": "8.8.8.8",
             "job":      "blackbox",
@@ -757,10 +752,9 @@ scrape_configs:
   - files: ["testdata/file_sd.json", "testdata/file_sd*.yml"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://host1:80/abc/de",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host1:80/abc/de",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host1:80",
                 "job":      "foo",
@@ -771,10 +765,9 @@ scrape_configs:
             jobNameOriginal: "foo",
         },
         {
-            ScrapeURL:       "http://host2:80/abc/de",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host2:80/abc/de",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host2:80",
                 "job":      "foo",
@@ -785,10 +778,9 @@ scrape_configs:
             jobNameOriginal: "foo",
         },
         {
-            ScrapeURL:       "http://localhost:9090/abc/de",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://localhost:9090/abc/de",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "localhost:9090",
                 "job":      "foo",
@@ -821,10 +813,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -845,10 +836,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -873,7 +863,7 @@ scrape_configs:
   metrics_path: /foo/bar
   scheme: https
   honor_labels: true
-  honor_timestamps: false
+  honor_timestamps: true
   follow_redirects: false
   params:
     p: ["x&y", "="]
@@ -899,7 +889,7 @@ scrape_configs:
             ScrapeInterval:  54 * time.Second,
             ScrapeTimeout:   5 * time.Second,
             HonorLabels:     true,
-            HonorTimestamps: false,
+            HonorTimestamps: true,
             DenyRedirects:   true,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:443",
@@ -916,7 +906,7 @@ scrape_configs:
             ScrapeInterval:  54 * time.Second,
             ScrapeTimeout:   5 * time.Second,
             HonorLabels:     true,
-            HonorTimestamps: false,
+            HonorTimestamps: true,
             DenyRedirects:   true,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "aaa:443",
@@ -929,10 +919,9 @@ scrape_configs:
             jobNameOriginal: "foo",
         },
         {
-            ScrapeURL:       "http://1.2.3.4:80/metrics",
-            ScrapeInterval:  8 * time.Second,
-            ScrapeTimeout:   8 * time.Second,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://1.2.3.4:80/metrics",
+            ScrapeInterval: 8 * time.Second,
+            ScrapeTimeout:  8 * time.Second,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "1.2.3.4:80",
                 "job":      "qwer",
@@ -945,10 +934,9 @@ scrape_configs:
             jobNameOriginal: "qwer",
         },
         {
-            ScrapeURL:       "http://foobar:80/metrics",
-            ScrapeInterval:  8 * time.Second,
-            ScrapeTimeout:   8 * time.Second,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foobar:80/metrics",
+            ScrapeInterval: 8 * time.Second,
+            ScrapeTimeout:  8 * time.Second,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foobar:80",
                 "job":      "asdf",
@@ -995,10 +983,9 @@ scrape_configs:
   - targets: ["foo.bar:1234", "drop-this-target"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics?x=keep_me",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics?x=keep_me",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "hash":     "82",
                 "instance": "foo.bar:1234",
@@ -1039,10 +1026,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "mailto://foo.bar:1234/abc.de?a=b",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "mailto://foo.bar:1234/abc.de?a=b",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "fake.addr",
                 "job":      "https",
@@ -1074,10 +1060,9 @@ scrape_configs:
   - targets: ["foo.bar:1234", "xyz"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "3",
@@ -1098,10 +1083,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -1118,10 +1102,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -1138,10 +1121,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -1172,10 +1154,9 @@ scrape_configs:
       job: yyy
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://pp:80/metrics?a=c&a=xy",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://pp:80/metrics?a=c&a=xy",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "foo":      "bar",
                 "instance": "pp:80",
@@ -1239,10 +1220,9 @@ scrape_configs:
     replacement: true
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://127.0.0.1:9116/snmp?module=if_mib&target=192.168.1.2",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://127.0.0.1:9116/snmp?module=if_mib&target=192.168.1.2",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "192.168.1.2",
                 "job":      "snmp",
@@ -1269,10 +1249,9 @@ scrape_configs:
     target_label: __metrics_path__
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metricspath",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metricspath",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "path wo slash",
@@ -1300,7 +1279,6 @@ scrape_configs:
             ScrapeTimeout:       time.Hour * 24,
             ScrapeAlignInterval: time.Hour * 24,
             ScrapeOffset:        time.Hour * 24 * 2,
-            HonorTimestamps:     true,
             NoStaleMarkers:      true,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
@@ -1340,16 +1318,14 @@ func TestScrapeConfigClone(t *testing.T) {
     f(&ScrapeConfig{})
-    bFalse := false
     var ie promrelabel.IfExpression
     if err := ie.Parse(`{foo=~"bar",baz!="z"}`); err != nil {
         t.Fatalf("unexpected error: %s", err)
     }
     f(&ScrapeConfig{
-        JobName:         "foo",
-        ScrapeInterval:  promutils.NewDuration(time.Second * 47),
-        HonorLabels:     true,
-        HonorTimestamps: &bFalse,
+        JobName:        "foo",
+        ScrapeInterval: promutils.NewDuration(time.Second * 47),
+        HonorLabels:    true,
         Params: map[string][]string{
             "foo": {"bar", "baz"},
         },
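
The TestScrapeConfigClone hunk above exercises round-trip copying of ScrapeConfig values; one common way to implement such a clone is a marshal/unmarshal round trip. A hedged sketch under that assumption (simplified struct and `clone` helper for illustration; the real clone logic lives in lib/promscrape and may differ):

```go
package main

import (
	"fmt"
	"reflect"

	"gopkg.in/yaml.v2"
)

// scrapeConfig is a simplified stand-in for ScrapeConfig.
type scrapeConfig struct {
	JobName         string `yaml:"job_name"`
	HonorLabels     bool   `yaml:"honor_labels,omitempty"`
	HonorTimestamps bool   `yaml:"honor_timestamps,omitempty"`
}

// clone deep-copies cfg by marshaling it to YAML and unmarshaling it back.
func clone(cfg *scrapeConfig) (*scrapeConfig, error) {
	data, err := yaml.Marshal(cfg)
	if err != nil {
		return nil, err
	}
	var dst scrapeConfig
	if err := yaml.Unmarshal(data, &dst); err != nil {
		return nil, err
	}
	return &dst, nil
}

func main() {
	src := &scrapeConfig{JobName: "foo", HonorLabels: true}
	dst, err := clone(src)
	if err != nil {
		panic(err)
	}
	// The clone must equal the source while being an independent copy.
	fmt.Println(reflect.DeepEqual(src, dst)) // prints: true
}
```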