Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git, synced 2025-01-10 15:14:09 +00:00
lib/promscrape: use local scrape timestamps for scraped metrics unless honor_timestamps: true is set explicitly

This fixes gaps for metrics collected from cadvisor, which exports invalid timestamps that break staleness detection on the VictoriaMetrics side. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697 , https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1654614799 and https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1656540535

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1773
commit ee98f9ae66 (parent 89ccf19b70)
4 changed files with 75 additions and 100 deletions
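To make the behavior change concrete, here is a minimal, hypothetical sketch (in the spirit of the commit message, not the actual lib/promscrape code) of how a scraper picks the timestamp to store for each sample; the sampleTimestamp helper and its arguments are illustrative assumptions.

package main

import (
    "fmt"
    "time"
)

// sampleTimestamp picks the timestamp to store for a scraped sample.
// exposedTs is the timestamp (in milliseconds) provided by the target in its
// /metrics response, or 0 if the target did not provide one; scrapeTs is the
// local time of the scrape. Target-provided timestamps are used only when
// honor_timestamps is explicitly enabled, so staleness detection keeps working
// even if the target (e.g. cadvisor) exports invalid timestamps.
func sampleTimestamp(exposedTs, scrapeTs int64, honorTimestamps bool) int64 {
    if honorTimestamps && exposedTs != 0 {
        return exposedTs
    }
    return scrapeTs
}

func main() {
    scrapeTs := time.Now().UnixMilli()
    laggingTs := scrapeTs - 5*60*1000 // a stale timestamp exported by the target

    fmt.Println(sampleTimestamp(laggingTs, scrapeTs, false)) // local timestamp (new default)
    fmt.Println(sampleTimestamp(laggingTs, scrapeTs, true))  // target timestamp (honor_timestamps: true)
}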
@@ -28,6 +28,7 @@ The following `tip` changes can be tested by building VictoriaMetrics components
 
 Released at 2023-07-28
 
+* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): use local scrape timestamps for the scraped metrics unless `honor_timestamps: true` option is explicitly set at [scrape_config](https://docs.victoriametrics.com/sd_configs.html#scrape_configs). This fixes gaps for metrics collected from [cadvisor](https://github.com/google/cadvisor) or similar exporters, which export metrics with invalid timestamps. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697) and [this comment](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1654614799) for details.
 * BUGFIX: [vmalert](https://docs.victoriametrics.com/vmalert.html): revert unit test feature for alerting and recording rules introduced in [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4596). See the following [change](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4734).
 
 ## [v1.92.0](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.92.0)
@@ -1445,7 +1445,8 @@ scrape_configs:
 # If honor_timestamps is set to "false", the timestamps of the metrics exposed
 # by the target will be ignored.
 #
-# By default, honor_timestamps is set to true.
+# By default, honor_timestamps is set to false.
+# See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1656540535 for details.
 # honor_timestamps: <boolean>
 
 # scheme configures the protocol scheme used for requests.
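Following the documentation change above: with the option now backed by a plain bool (see the config struct hunk below), an omitted honor_timestamps simply keeps the zero value. A small sketch assuming gopkg.in/yaml.v2 and a trimmed-down stand-in struct (not the real ScrapeConfig):

package main

import (
    "fmt"

    "gopkg.in/yaml.v2"
)

// A trimmed-down stand-in for a scrape config: with a plain bool field,
// omitting honor_timestamps leaves the zero value (false), matching the
// new default described in the docs above.
type scrapeConfig struct {
    JobName         string `yaml:"job_name"`
    HonorTimestamps bool   `yaml:"honor_timestamps,omitempty"`
}

func main() {
    var explicit, omitted scrapeConfig
    if err := yaml.Unmarshal([]byte("job_name: foo\nhonor_timestamps: true\n"), &explicit); err != nil {
        panic(err)
    }
    if err := yaml.Unmarshal([]byte("job_name: bar\n"), &omitted); err != nil {
        panic(err)
    }
    fmt.Println(explicit.HonorTimestamps) // true  - explicitly enabled
    fmt.Println(omitted.HonorTimestamps)  // false - the new default
}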
@@ -244,7 +244,7 @@ type ScrapeConfig struct {
     ScrapeTimeout    *promutils.Duration `yaml:"scrape_timeout,omitempty"`
     MetricsPath      string              `yaml:"metrics_path,omitempty"`
     HonorLabels      bool                `yaml:"honor_labels,omitempty"`
-    HonorTimestamps  *bool               `yaml:"honor_timestamps,omitempty"`
+    HonorTimestamps  bool                `yaml:"honor_timestamps,omitempty"`
     Scheme           string              `yaml:"scheme,omitempty"`
     Params           map[string][]string `yaml:"params,omitempty"`
     HTTPClientConfig promauth.HTTPClientConfig `yaml:",inline"`
@@ -984,10 +984,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         scrapeTimeout = scrapeInterval
     }
     honorLabels := sc.HonorLabels
-    honorTimestamps := true
-    if sc.HonorTimestamps != nil {
-        honorTimestamps = *sc.HonorTimestamps
-    }
+    honorTimestamps := sc.HonorTimestamps
     denyRedirects := false
     if sc.HTTPClientConfig.FollowRedirects != nil {
         denyRedirects = !*sc.HTTPClientConfig.FollowRedirects
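The hunk above drops the tri-state *bool handling (nil meant "not set" and previously defaulted to true) in favor of a plain bool whose zero value is the new default. A short sketch, with hypothetical helper names, contrasting the two patterns:

package main

import "fmt"

// Old pattern (before this commit): a *bool distinguishes "unset" from an
// explicit false, and the unset case defaulted to true.
func honorTimestampsOld(opt *bool) bool {
    if opt == nil {
        return true
    }
    return *opt
}

// New pattern (after this commit): a plain bool, so an omitted option is
// false and target timestamps are honored only when explicitly enabled.
func honorTimestampsNew(opt bool) bool {
    return opt
}

func main() {
    fmt.Println(honorTimestampsOld(nil))   // true  - old default
    fmt.Println(honorTimestampsNew(false)) // false - new default
}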
@@ -89,7 +89,7 @@ scrape_configs:
 scrape_configs:
 - job_name: foo
   honor_labels: true
-  honor_timestamps: false
+  honor_timestamps: true
   scheme: https
   params:
     foo:
@@ -243,10 +243,9 @@ scrape_configs:
     resetNonEssentialFields(sws)
     swsExpected := []*ScrapeWork{
         {
-            ScrapeURL:       "http://host1:80/metric/path1?x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host1:80/metric/path1?x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host1:80",
                 "job":      "abc",
@@ -256,10 +255,9 @@ scrape_configs:
             jobNameOriginal: "abc",
         },
         {
-            ScrapeURL:       "https://host2:443/metric/path2?x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "https://host2:443/metric/path2?x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host2:443",
                 "job":      "abc",
@@ -269,10 +267,9 @@ scrape_configs:
             jobNameOriginal: "abc",
         },
         {
-            ScrapeURL:       "http://host3:1234/metric/path3?arg1=value1&x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host3:1234/metric/path3?arg1=value1&x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host3:1234",
                 "job":      "abc",
@@ -282,10 +279,9 @@ scrape_configs:
             jobNameOriginal: "abc",
         },
         {
-            ScrapeURL:       "https://host4:1234/foo/bar?x=y",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "https://host4:1234/foo/bar?x=y",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host4:1234",
                 "job":      "abc",
@@ -330,10 +326,9 @@ scrape_configs:
     sws := cfg.getStaticScrapeWork()
     resetNonEssentialFields(sws)
     swsExpected := []*ScrapeWork{{
-        ScrapeURL:       "http://black:9115/probe?module=dns_udp_example&target=8.8.8.8",
-        ScrapeInterval:  defaultScrapeInterval,
-        ScrapeTimeout:   defaultScrapeTimeout,
-        HonorTimestamps: true,
+        ScrapeURL:      "http://black:9115/probe?module=dns_udp_example&target=8.8.8.8",
+        ScrapeInterval: defaultScrapeInterval,
+        ScrapeTimeout:  defaultScrapeTimeout,
         Labels: promutils.NewLabelsFromMap(map[string]string{
             "instance": "8.8.8.8",
             "job":      "blackbox",
@@ -757,10 +752,9 @@ scrape_configs:
   - files: ["testdata/file_sd.json", "testdata/file_sd*.yml"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://host1:80/abc/de",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host1:80/abc/de",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host1:80",
                 "job":      "foo",
@@ -771,10 +765,9 @@ scrape_configs:
             jobNameOriginal: "foo",
         },
         {
-            ScrapeURL:       "http://host2:80/abc/de",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://host2:80/abc/de",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "host2:80",
                 "job":      "foo",
@@ -785,10 +778,9 @@ scrape_configs:
             jobNameOriginal: "foo",
         },
         {
-            ScrapeURL:       "http://localhost:9090/abc/de",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://localhost:9090/abc/de",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "localhost:9090",
                 "job":      "foo",
@@ -821,10 +813,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -845,10 +836,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -873,7 +863,7 @@ scrape_configs:
   metrics_path: /foo/bar
   scheme: https
   honor_labels: true
-  honor_timestamps: false
+  honor_timestamps: true
   follow_redirects: false
   params:
     p: ["x&y", "="]
@@ -899,7 +889,7 @@ scrape_configs:
             ScrapeInterval:  54 * time.Second,
             ScrapeTimeout:   5 * time.Second,
             HonorLabels:     true,
-            HonorTimestamps: false,
+            HonorTimestamps: true,
             DenyRedirects:   true,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:443",
@@ -916,7 +906,7 @@ scrape_configs:
             ScrapeInterval:  54 * time.Second,
             ScrapeTimeout:   5 * time.Second,
             HonorLabels:     true,
-            HonorTimestamps: false,
+            HonorTimestamps: true,
             DenyRedirects:   true,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "aaa:443",
@@ -929,10 +919,9 @@ scrape_configs:
             jobNameOriginal: "foo",
         },
         {
-            ScrapeURL:       "http://1.2.3.4:80/metrics",
-            ScrapeInterval:  8 * time.Second,
-            ScrapeTimeout:   8 * time.Second,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://1.2.3.4:80/metrics",
+            ScrapeInterval: 8 * time.Second,
+            ScrapeTimeout:  8 * time.Second,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "1.2.3.4:80",
                 "job":      "qwer",
@@ -945,10 +934,9 @@ scrape_configs:
             jobNameOriginal: "qwer",
         },
         {
-            ScrapeURL:       "http://foobar:80/metrics",
-            ScrapeInterval:  8 * time.Second,
-            ScrapeTimeout:   8 * time.Second,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foobar:80/metrics",
+            ScrapeInterval: 8 * time.Second,
+            ScrapeTimeout:  8 * time.Second,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foobar:80",
                 "job":      "asdf",
@@ -995,10 +983,9 @@ scrape_configs:
   - targets: ["foo.bar:1234", "drop-this-target"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics?x=keep_me",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics?x=keep_me",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "hash":     "82",
                 "instance": "foo.bar:1234",
@@ -1039,10 +1026,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "mailto://foo.bar:1234/abc.de?a=b",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "mailto://foo.bar:1234/abc.de?a=b",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "fake.addr",
                 "job":      "https",
@@ -1074,10 +1060,9 @@ scrape_configs:
   - targets: ["foo.bar:1234", "xyz"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "3",
@@ -1098,10 +1083,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -1118,10 +1102,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -1138,10 +1121,9 @@ scrape_configs:
   - targets: ["foo.bar:1234"]
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metrics",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metrics",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "foo",
@@ -1172,10 +1154,9 @@ scrape_configs:
       job: yyy
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://pp:80/metrics?a=c&a=xy",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://pp:80/metrics?a=c&a=xy",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "foo":      "bar",
                 "instance": "pp:80",
@@ -1239,10 +1220,9 @@ scrape_configs:
     replacement: true
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://127.0.0.1:9116/snmp?module=if_mib&target=192.168.1.2",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://127.0.0.1:9116/snmp?module=if_mib&target=192.168.1.2",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "192.168.1.2",
                 "job":      "snmp",
@@ -1269,10 +1249,9 @@ scrape_configs:
     target_label: __metrics_path__
 `, []*ScrapeWork{
         {
-            ScrapeURL:       "http://foo.bar:1234/metricspath",
-            ScrapeInterval:  defaultScrapeInterval,
-            ScrapeTimeout:   defaultScrapeTimeout,
-            HonorTimestamps: true,
+            ScrapeURL:      "http://foo.bar:1234/metricspath",
+            ScrapeInterval: defaultScrapeInterval,
+            ScrapeTimeout:  defaultScrapeTimeout,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
                 "job":      "path wo slash",
@@ -1300,7 +1279,6 @@ scrape_configs:
             ScrapeTimeout:       time.Hour * 24,
             ScrapeAlignInterval: time.Hour * 24,
             ScrapeOffset:        time.Hour * 24 * 2,
-            HonorTimestamps:     true,
             NoStaleMarkers:      true,
             Labels: promutils.NewLabelsFromMap(map[string]string{
                 "instance": "foo.bar:1234",
@@ -1340,16 +1318,14 @@ func TestScrapeConfigClone(t *testing.T) {
 
     f(&ScrapeConfig{})
 
-    bFalse := false
     var ie promrelabel.IfExpression
     if err := ie.Parse(`{foo=~"bar",baz!="z"}`); err != nil {
         t.Fatalf("unexpected error: %s", err)
     }
     f(&ScrapeConfig{
-        JobName:         "foo",
-        ScrapeInterval:  promutils.NewDuration(time.Second * 47),
-        HonorLabels:     true,
-        HonorTimestamps: &bFalse,
+        JobName:        "foo",
+        ScrapeInterval: promutils.NewDuration(time.Second * 47),
+        HonorLabels:    true,
         Params: map[string][]string{
             "foo": {"bar", "baz"},
         },