lib/promscrape: use local scrape timestamps for scraped metrics unless honor_timestamps: true is set explicitly

This fixes gaps for metrics collected from cadvisor, which exports invalid timestamps that break staleness detection on the VictoriaMetrics side. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697 , https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1654614799 and https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1656540535

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1773
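For illustration only, a minimal scrape_config sketch of the behavior described above (job names and targets are hypothetical and not part of this commit):

scrape_configs:
  # With this change, timestamps exported by the target are ignored by default,
  # so vmagent assigns its own scrape timestamps and staleness detection keeps working.
  - job_name: cadvisor
    static_configs:
      - targets: ["cadvisor.example:8080"]

  # Set honor_timestamps: true explicitly in order to keep the timestamps exposed by the target.
  - job_name: exporter-with-valid-timestamps
    honor_timestamps: true
    static_configs:
      - targets: ["exporter.example:9100"]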
This commit is contained in:
parent 89ccf19b70
commit ee98f9ae66

4 changed files with 75 additions and 100 deletions
docs/CHANGELOG.md

@@ -28,6 +28,7 @@ The following `tip` changes can be tested by building VictoriaMetrics components

+* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): use local scrape timestamps for the scraped metrics unless `honor_timestamps: true` option is explicitly set at [scrape_config](https://docs.victoriametrics.com/sd_configs.html#scrape_configs). This fixes gaps for metrics collected from [cadvisor](https://github.com/google/cadvisor) or similar exporters, which export metrics with invalid timestamps. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697) and [this comment](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1654614799) for details.
 * BUGFIX: [vmalert](https://docs.victoriametrics.com/vmalert.html): revert unit test feature for alerting and recording rules introduced in [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4596). See the following [change](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4734).

 ## [v1.92.0](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.92.0)

 Released at 2023-07-28
docs/sd_configs.md

@@ -1445,7 +1445,8 @@ scrape_configs:
   # If honor_timestamps is set to "false", the timestamps of the metrics exposed
   # by the target will be ignored.
   #
-  # By default, honor_timestamps is set to true.
+  # By default, honor_timestamps is set to false.
+  # See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4697#issuecomment-1656540535 for details.
   # honor_timestamps: <boolean>

   # scheme configures the protocol scheme used for requests.
lib/promscrape/config.go

@@ -244,7 +244,7 @@ type ScrapeConfig struct {
     ScrapeTimeout *promutils.Duration `yaml:"scrape_timeout,omitempty"`
     MetricsPath string `yaml:"metrics_path,omitempty"`
     HonorLabels bool `yaml:"honor_labels,omitempty"`
-    HonorTimestamps *bool `yaml:"honor_timestamps,omitempty"`
+    HonorTimestamps bool `yaml:"honor_timestamps,omitempty"`
     Scheme string `yaml:"scheme,omitempty"`
     Params map[string][]string `yaml:"params,omitempty"`
     HTTPClientConfig promauth.HTTPClientConfig `yaml:",inline"`
@@ -984,10 +984,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         scrapeTimeout = scrapeInterval
     }
     honorLabels := sc.HonorLabels
-    honorTimestamps := true
-    if sc.HonorTimestamps != nil {
-        honorTimestamps = *sc.HonorTimestamps
-    }
+    honorTimestamps := sc.HonorTimestamps
     denyRedirects := false
     if sc.HTTPClientConfig.FollowRedirects != nil {
         denyRedirects = !*sc.HTTPClientConfig.FollowRedirects
lib/promscrape/config_test.go

@@ -89,7 +89,7 @@ scrape_configs:
 scrape_configs:
 - job_name: foo
   honor_labels: true
-  honor_timestamps: false
+  honor_timestamps: true
   scheme: https
   params:
     foo:
@@ -246,7 +246,6 @@ scrape_configs:
     ScrapeURL: "http://host1:80/metric/path1?x=y",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "host1:80",
         "job": "abc",

@@ -259,7 +258,6 @@ scrape_configs:
     ScrapeURL: "https://host2:443/metric/path2?x=y",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "host2:443",
         "job": "abc",

@@ -272,7 +270,6 @@ scrape_configs:
     ScrapeURL: "http://host3:1234/metric/path3?arg1=value1&x=y",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "host3:1234",
         "job": "abc",

@@ -285,7 +282,6 @@ scrape_configs:
     ScrapeURL: "https://host4:1234/foo/bar?x=y",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "host4:1234",
         "job": "abc",

@@ -333,7 +329,6 @@ scrape_configs:
     ScrapeURL: "http://black:9115/probe?module=dns_udp_example&target=8.8.8.8",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "8.8.8.8",
         "job": "blackbox",
@@ -760,7 +755,6 @@ scrape_configs:
     ScrapeURL: "http://host1:80/abc/de",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "host1:80",
         "job": "foo",

@@ -774,7 +768,6 @@ scrape_configs:
     ScrapeURL: "http://host2:80/abc/de",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "host2:80",
         "job": "foo",

@@ -788,7 +781,6 @@ scrape_configs:
     ScrapeURL: "http://localhost:9090/abc/de",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "localhost:9090",
         "job": "foo",

@@ -824,7 +816,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metrics",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
         "job": "foo",

@@ -848,7 +839,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metrics",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
         "job": "foo",
@@ -873,7 +863,7 @@ scrape_configs:
   metrics_path: /foo/bar
   scheme: https
   honor_labels: true
-  honor_timestamps: false
+  honor_timestamps: true
   follow_redirects: false
   params:
     p: ["x&y", "="]
@@ -899,7 +889,7 @@ scrape_configs:
     ScrapeInterval: 54 * time.Second,
     ScrapeTimeout: 5 * time.Second,
     HonorLabels: true,
-    HonorTimestamps: false,
+    HonorTimestamps: true,
     DenyRedirects: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:443",

@@ -916,7 +906,7 @@ scrape_configs:
     ScrapeInterval: 54 * time.Second,
     ScrapeTimeout: 5 * time.Second,
     HonorLabels: true,
-    HonorTimestamps: false,
+    HonorTimestamps: true,
     DenyRedirects: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "aaa:443",
@@ -932,7 +922,6 @@ scrape_configs:
     ScrapeURL: "http://1.2.3.4:80/metrics",
     ScrapeInterval: 8 * time.Second,
     ScrapeTimeout: 8 * time.Second,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "1.2.3.4:80",
         "job": "qwer",

@@ -948,7 +937,6 @@ scrape_configs:
     ScrapeURL: "http://foobar:80/metrics",
     ScrapeInterval: 8 * time.Second,
     ScrapeTimeout: 8 * time.Second,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foobar:80",
         "job": "asdf",

@@ -998,7 +986,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metrics?x=keep_me",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "hash": "82",
         "instance": "foo.bar:1234",

@@ -1042,7 +1029,6 @@ scrape_configs:
     ScrapeURL: "mailto://foo.bar:1234/abc.de?a=b",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "fake.addr",
         "job": "https",
@@ -1077,7 +1063,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metrics",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
         "job": "3",

@@ -1101,7 +1086,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metrics",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
         "job": "foo",

@@ -1121,7 +1105,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metrics",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
         "job": "foo",

@@ -1141,7 +1124,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metrics",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
         "job": "foo",

@@ -1175,7 +1157,6 @@ scrape_configs:
     ScrapeURL: "http://pp:80/metrics?a=c&a=xy",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "foo": "bar",
         "instance": "pp:80",

@@ -1242,7 +1223,6 @@ scrape_configs:
     ScrapeURL: "http://127.0.0.1:9116/snmp?module=if_mib&target=192.168.1.2",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "192.168.1.2",
         "job": "snmp",
@@ -1272,7 +1252,6 @@ scrape_configs:
     ScrapeURL: "http://foo.bar:1234/metricspath",
     ScrapeInterval: defaultScrapeInterval,
     ScrapeTimeout: defaultScrapeTimeout,
-    HonorTimestamps: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
         "job": "path wo slash",

@@ -1300,7 +1279,6 @@ scrape_configs:
     ScrapeTimeout: time.Hour * 24,
     ScrapeAlignInterval: time.Hour * 24,
     ScrapeOffset: time.Hour * 24 * 2,
-    HonorTimestamps: true,
     NoStaleMarkers: true,
     Labels: promutils.NewLabelsFromMap(map[string]string{
         "instance": "foo.bar:1234",
@@ -1340,7 +1318,6 @@ func TestScrapeConfigClone(t *testing.T) {

     f(&ScrapeConfig{})

-    bFalse := false
     var ie promrelabel.IfExpression
     if err := ie.Parse(`{foo=~"bar",baz!="z"}`); err != nil {
         t.Fatalf("unexpected error: %s", err)

@@ -1349,7 +1326,6 @@ func TestScrapeConfigClone(t *testing.T) {
     JobName: "foo",
     ScrapeInterval: promutils.NewDuration(time.Second * 47),
     HonorLabels: true,
-    HonorTimestamps: &bFalse,
     Params: map[string][]string{
         "foo": {"bar", "baz"},
     },