diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 61d16bcdf..78c98a4ce 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -16,7 +16,8 @@ sort: 15
 * FEATURE: [vmalert](https://docs.victoriametrics.com/vmalert.html): add ability to configure notifiers (e.g. alertmanager) via a file in the way similar to Prometheus. See [these docs](https://docs.victoriametrics.com/vmalert.html#notifier-configuration-file), [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/2127).
 * FEATURE: [vmalert](https://docs.victoriametrics.com/vmalert.html): add support for Consul service discovery for notifiers. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1947).
 * FEATURE: [vmalert](https://docs.victoriametrics.com/vmalert.html): add support for specifying Basic Auth password for notifiers via a file. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1567).
-* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): provide the ability to fetch target responses on behalf of `vmagent`. Click `fetch response` link for the needed target at `/targets` page. This feature may be useful for debugging responses from targets located in isolated environments.
+* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): provide the ability to fetch target responses on behalf of `vmagent` by clicking the `response` link for the needed target at `/targets` page. This feature may be useful for debugging responses from targets located in isolated environments.
+* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): show the total number of scrapes and the total number of scrape errors per target at `/targets` page. This information may be useful when debugging unreliable scrape targets.
 * BUGFIX: return proper results from `highestMax()` function at [Graphite render API](https://docs.victoriametrics.com/#graphite-render-api-usage). Previously it was incorrectly returning timeseries with min peaks instead of max peaks.
 * BUGFIX: properly limit indexdb cache sizes. Previously they could exceed values set via `-memory.allowedPercent` and/or `-memory.allowedBytes` when `indexdb` contained many data parts. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2007).
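The two vmagent entries above describe additions to the `/targets` page, whose plain-text format is defined by `targets_response.qtpl` in the next file of this diff. As a rough illustration only (not part of the change), here is a small Go sketch that polls that page and prints targets reporting scrape failures; the vmagent address (`localhost:8429`, the default `-httpListenAddr`) and the string-based filtering are assumptions made for the example.

```go
package main

import (
	"bufio"
	"fmt"
	"log"
	"net/http"
	"strings"
)

func main() {
	// Fetch the plain-text /targets page. The address is an assumption:
	// vmagent listens on :8429 by default (-httpListenAddr).
	resp, err := http.Get("http://localhost:8429/targets")
	if err != nil {
		log.Fatalf("cannot fetch /targets: %s", err)
	}
	defer resp.Body.Close()

	sc := bufio.NewScanner(resp.Body)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // target lines with many labels can be long
	for sc.Scan() {
		line := sc.Text()
		// Per the template, every target line contains "scrapes_total=N, scrapes_failed=M, ...".
		// Print only the lines where at least one scrape has failed.
		if strings.Contains(line, "scrapes_failed=") && !strings.Contains(line, "scrapes_failed=0,") {
			fmt.Println(strings.TrimSpace(line))
		}
	}
	if err := sc.Err(); err != nil {
		log.Fatalf("cannot read /targets response: %s", err)
	}
}
```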
diff --git a/lib/promscrape/targets_response.qtpl b/lib/promscrape/targets_response.qtpl
index 9e50fbff7..1d50802ee 100644
--- a/lib/promscrape/targets_response.qtpl
+++ b/lib/promscrape/targets_response.qtpl
@@ -1,5 +1,8 @@
-{% import "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
-%}
+{% import (
+	"time"
+	"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
+	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
+) %}
 
 {% stripspace %}
 
@@ -9,18 +12,17 @@
 job={%q= js.job %} ({%d js.upCount %}/{%d js.targetsTotal %}{% space %}up)
 {% newline %}
 {% for _, ts := range js.targetsStatus %}
-	{% code
-		labels := promLabelsString(ts.labels)
-		ol := promLabelsString(ts.originalLabels)
-	%}
-{%s= "\t" %}state={% if ts.up %}up{% else %}down{% endif %},{% space %}
-	endpoint={%s= ts.endpoint %},{% space %}
-	labels={%s= labels %}
-	{% if showOriginLabels %}, originalLabels={%s= ol %}{% endif %},{% space %}
-	last_scrape={%f.3 ts.lastScrapeTime.Seconds() %}s ago,{% space %}
-	scrape_duration={%f.3 ts.scrapeDuration.Seconds() %}s,{% space %}
+{%s= "\t" %}
+	state={% if ts.up %}up{% else %}down{% endif %},{% space %}
+	endpoint={%s= ts.sw.Config.ScrapeURL %},{% space %}
+	labels={%s= promLabelsString(promrelabel.FinalizeLabels(nil, ts.sw.Config.Labels)) %},{% space %}
+	{% if showOriginLabels %}originalLabels={%s= promLabelsString(ts.sw.Config.OriginalLabels) %},{% space %}{% endif %}
+	scrapes_total={%d ts.scrapesTotal %},{% space %}
+	scrapes_failed={%d ts.scrapesFailed %},{% space %}
+	last_scrape={%f.3 ts.getDurationFromLastScrape().Seconds() %}s ago,{% space %}
+	scrape_duration={%d int(ts.scrapeDuration) %}ms,{% space %}
 	samples_scraped={%d ts.samplesScraped %},{% space %}
-	error={%q= ts.errMsg %}
+	error={% if ts.err != nil %}{%s= ts.err.Error() %}{% endif %}
 {% newline %}
 {% endfor %}
 {% endfor %}
@@ -65,33 +67,47 @@ job={%q= jobName %} (0/0 up)
 [remainder of the hunk: HTML table markup for the per-job tables on the /targets page. The header row changes from
  "Endpoint | State | Labels | Last Scrape | Scrape Duration | Samples Scraped | Error" to
  "Endpoint | State | Labels | Scrapes | Errors | Last Scrape | Duration | Samples | Last error",
  and the Endpoint cell gains a "response" link (title "click to fetch target response on behalf of the scraper",
  keyed by targetID) in place of the old "(fetch response)" link.]

 [diff of the auto-generated lib/promscrape/targets_response.qtpl.go, regenerated from the template above:
  it picks up the new scrapes_total/scrapes_failed columns (ts.scrapesTotal, ts.scrapesFailed), the
  promrelabel.FinalizeLabels(nil, ts.sw.Config.Labels) label rendering, and the per-target "response" link.]
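The template changes above reference per-target fields (`ts.scrapesTotal`, `ts.scrapesFailed`, `ts.err`, `ts.samplesScraped`) and the `ts.getDurationFromLastScrape()` helper, whose definitions live elsewhere in `lib/promscrape` and are not part of this diff. The following is a minimal, hypothetical sketch of how such bookkeeping could look; only the names visible in the template are taken from the diff, the struct layout and the `updateAfterScrape` helper are assumptions.

```go
package main

import (
	"fmt"
	"time"
)

// targetStatus is NOT the code from this diff. It is an illustrative sketch of the
// per-target bookkeeping the template relies on.
type targetStatus struct {
	up             bool
	scrapeTime     int64 // unix timestamp of the last scrape attempt, in milliseconds
	scrapeDuration int64 // duration of the last scrape, in milliseconds
	samplesScraped int
	scrapesTotal   int
	scrapesFailed  int
	err            error
}

// updateAfterScrape records the outcome of a single scrape attempt.
func (ts *targetStatus) updateAfterScrape(startMillis, durationMillis int64, samples int, err error) {
	ts.up = err == nil
	ts.scrapeTime = startMillis
	ts.scrapeDuration = durationMillis
	ts.samplesScraped = samples
	ts.scrapesTotal++
	if err != nil {
		ts.scrapesFailed++
	}
	ts.err = err
}

// getDurationFromLastScrape returns how long ago the last scrape happened;
// the template prints it as "last_scrape=...s ago".
func (ts *targetStatus) getDurationFromLastScrape() time.Duration {
	return time.Since(time.UnixMilli(ts.scrapeTime))
}

func main() {
	var ts targetStatus
	ts.updateAfterScrape(time.Now().UnixMilli(), 120, 42, nil)
	fmt.Printf("scrapes_total=%d, scrapes_failed=%d, last_scrape=%.3fs ago, scrape_duration=%dms, samples_scraped=%d\n",
		ts.scrapesTotal, ts.scrapesFailed, ts.getDurationFromLastScrape().Seconds(), ts.scrapeDuration, ts.samplesScraped)
}
```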