diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 18039a3de..8dfe4242f 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -34,6 +34,7 @@ The following tip changes can be tested by building VictoriaMetrics components f * BUGFIX: [vmui](https://docs.victoriametrics.com/#vmui): add support for time zone selection for older versions of browsers. See [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/3680). * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): update API version for [ec2_sd_configs](https://docs.victoriametrics.com/sd_configs.html#ec2_sd_configs) to fix [the issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3700) with missing `__meta_ec2_availability_zone_id` attribute. * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): properly return `200 OK` HTTP status code when importing data via [Pushgateway protocol](https://docs.victoriametrics.com/#how-to-import-data-in-prometheus-exposition-format). See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3636). +* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): do not add `exported_` prefix to scraped metric names, which clash with the [automatically generated metric names](https://docs.victoriametrics.com/vmagent.html#automatically-generated-metrics) if `honor_labels: true` option is set in the [scrape_config](https://docs.victoriametrics.com/sd_configs.html#scrape_configs). See [this](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3557) and [this](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3406) issues. * BUGFIX: [vmauth](https://docs.victoriametrics.com/vmauth.html): allow re-entering authorization info in the web browser if the entered info was incorrect. Previously it was non-trivial to do via the web browser, since `vmauth` was returning `400 Bad Request` instead of `401 Unauthorized` http response code. 
* BUGFIX: [vmauth](https://docs.victoriametrics.com/vmauth.html): always log the client address and the requested URL on proxying errors. Previously some errors could miss this information. * BUGFIX: [vmbackup](https://docs.victoriametrics.com/vmbackup.html): fix snapshot not being deleted after backup completion. This issue could result in unnecessary snapshots being stored; such snapshots must be deleted manually. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3735). diff --git a/lib/promscrape/scrapework.go b/lib/promscrape/scrapework.go index db495a9a1..95f068b80 100644 --- a/lib/promscrape/scrapework.go +++ b/lib/promscrape/scrapework.go @@ -133,7 +133,7 @@ type ScrapeWork struct { // See https://docs.victoriametrics.com/vmagent.html#prometheus-staleness-markers NoStaleMarkers bool - //The Tenant Info + // The Tenant Info AuthToken *auth.Token // The original 'job_name' @@ -905,11 +905,13 @@ func (sw *scrapeWork) addAutoTimeseries(wc *writeRequestCtx, name string, value func (sw *scrapeWork) addRowToTimeseries(wc *writeRequestCtx, r *parser.Row, timestamp int64, needRelabel bool) { metric := r.Metric if needRelabel && isAutoMetric(metric) { - bb := bbPool.Get() - bb.B = append(bb.B, "exported_"...) - bb.B = append(bb.B, metric...) - metric = bytesutil.InternBytes(bb.B) - bbPool.Put(bb) + if !sw.Config.HonorLabels && len(r.Tags) == 0 { + bb := bbPool.Get() + bb.B = append(bb.B, "exported_"...) + bb.B = append(bb.B, metric...) 
+ metric = bytesutil.InternBytes(bb.B) + bbPool.Put(bb) + } } labelsLen := len(wc.labels) targetLabels := sw.Config.Labels.GetLabels() diff --git a/lib/promscrape/scrapework_test.go b/lib/promscrape/scrapework_test.go index 3f5330e60..c45471528 100644 --- a/lib/promscrape/scrapework_test.go +++ b/lib/promscrape/scrapework_test.go @@ -361,9 +361,27 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) { `, &ScrapeWork{ ScrapeTimeout: time.Second * 42, }, ` - exported_up{bar="baz"} 34.44 123 - exported_scrape_series_added 3.435 123 + up{bar="baz"} 34.44 123 bar{a="b",c="d"} -3e4 123 + exported_scrape_series_added 3.435 123 + up 1 123 + scrape_duration_seconds 0 123 + scrape_samples_scraped 3 123 + scrape_samples_post_metric_relabeling 3 123 + scrape_timeout_seconds 42 123 + scrape_series_added 3 123 + `) + f(` + up{bar="baz"} 34.44 + bar{a="b",c="d"} -3e4 + scrape_series_added 3.435 + `, &ScrapeWork{ + ScrapeTimeout: time.Second * 42, + HonorLabels: true, + }, ` + up{bar="baz"} 34.44 123 + bar{a="b",c="d"} -3e4 123 + scrape_series_added 3.435 123 up 1 123 scrape_samples_scraped 3 123 scrape_duration_seconds 0 123