Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git, synced 2025-01-10 15:14:09 +00:00
lib/promscrape: provide the ability to fetch target responses on behalf of vmagent or single-node VictoriaMetrics
This feature may be useful when debugging metrics for a given target located in an isolated environment.
This commit is contained in:
parent 1173964d8d
commit 678b3e71db

7 changed files with 173 additions and 102 deletions
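As a rough usage sketch (not part of this commit): once the new `/target_response` endpoint is exposed, a target's response can be pulled with a plain HTTP GET. The listen address below assumes vmagent's default `-httpListenAddr` of `:8429`, and the `id` value is a placeholder that would be copied from the `fetch response` link on the `/targets` page.

package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Placeholder values: adjust the host/port to where vmagent (or single-node
	// VictoriaMetrics) listens, and take the id from the /targets page link.
	url := "http://localhost:8429/target_response?id=000000c0004d2000"

	resp, err := http.Get(url)
	if err != nil {
		fmt.Fprintf(os.Stderr, "request failed: %v\n", err)
		os.Exit(1)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Fprintf(os.Stderr, "cannot read response: %v\n", err)
		os.Exit(1)
	}
	// The endpoint returns the raw scrape response of the target as text/plain.
	fmt.Printf("%s\n", body)
}

The diff below wires the new endpoint into the HTTP request handler and the promscrape package.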
@@ -262,6 +262,14 @@ func requestHandler(w http.ResponseWriter, r *http.Request) bool {
 		promscrapeTargetsRequests.Inc()
 		promscrape.WriteHumanReadableTargetsStatus(w, r)
 		return true
+	case "/target_response":
+		promscrapeTargetResponseRequests.Inc()
+		if err := promscrape.WriteTargetResponse(w, r); err != nil {
+			promscrapeTargetResponseErrors.Inc()
+			httpserver.Errorf(w, r, "%s", err)
+			return true
+		}
+		return true
 	case "/config":
 		if *configAuthKey != "" && r.FormValue("authKey") != *configAuthKey {
 			err := &httpserver.ErrorWithStatusCode{
@@ -443,6 +451,9 @@ var (
 	promscrapeTargetsRequests = metrics.NewCounter(`vmagent_http_requests_total{path="/targets"}`)
 	promscrapeAPIV1TargetsRequests = metrics.NewCounter(`vmagent_http_requests_total{path="/api/v1/targets"}`)
 
+	promscrapeTargetResponseRequests = metrics.NewCounter(`vmagent_http_requests_total{path="/target_response"}`)
+	promscrapeTargetResponseErrors = metrics.NewCounter(`vmagent_http_request_errors_total{path="/target_response"}`)
+
 	promscrapeConfigRequests = metrics.NewCounter(`vmagent_http_requests_total{path="/config"}`)
 
 	promscrapeConfigReloadRequests = metrics.NewCounter(`vmagent_http_requests_total{path="/-/reload"}`)

@@ -16,6 +16,7 @@ sort: 15
 * FEATURE: [vmalert](https://docs.victoriametrics.com/vmalert.html): add ability to configure notifiers (e.g. alertmanager) via a file in the way similar to Prometheus. See [these docs](https://docs.victoriametrics.com/vmalert.html#notifier-configuration-file), [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/2127).
 * FEATURE: [vmalert](https://docs.victoriametrics.com/vmalert.html): add support for Consul service discovery for notifiers. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1947).
 * FEATURE: [vmalert](https://docs.victoriametrics.com/vmalert.html): add support for specifying Basic Auth password for notifiers via a file. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1567).
+* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): provide the ability to fetch target responses on behalf of `vmagent`. Click `fetch response` link for the needed target at `/targets` page. This feature may be useful for debugging responses from targets located in isolated environments.
 
 * BUGFIX: return proper results from `highestMax()` function at [Graphite render API](https://docs.victoriametrics.com/#graphite-render-api-usage). Previously it was incorrectly returning timeseries with min peaks instead of max peaks.
 * BUGFIX: properly limit indexdb cache sizes. Previously they could exceed values set via `-memory.allowedPercent` and/or `-memory.allowedBytes` when `indexdb` contained many data parts. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2007).

@@ -373,14 +373,14 @@ func (sg *scraperGroup) update(sws []*ScrapeWork) {
 		sg.activeScrapers.Inc()
 		sg.scrapersStarted.Inc()
 		sg.wg.Add(1)
-		tsmGlobal.Register(sw)
+		tsmGlobal.Register(&sc.sw)
 		go func(sw *ScrapeWork) {
 			defer func() {
 				sg.wg.Done()
 				close(sc.stoppedCh)
 			}()
 			sc.sw.run(sc.stopCh, sg.globalStopCh)
-			tsmGlobal.Unregister(sw)
+			tsmGlobal.Unregister(&sc.sw)
 			sg.activeScrapers.Dec()
 			sg.scrapersStopped.Inc()
 		}(sw)

@@ -3,6 +3,7 @@ package promscrape
 import (
 	"flag"
 	"fmt"
+	"io/ioutil"
 	"math"
 	"math/bits"
 	"strconv"
@@ -371,6 +372,22 @@ func (sw *scrapeWork) mustSwitchToStreamParseMode(responseSize int) bool {
 	return sw.Config.canSwitchToStreamParseMode() && responseSize >= minResponseSizeForStreamParse.N
 }
 
+// getTargetResponse() fetches response from sw target in the same way as when scraping the target.
+func (sw *scrapeWork) getTargetResponse() ([]byte, error) {
+	if *streamParse || sw.Config.StreamParse || sw.mustSwitchToStreamParseMode(sw.prevBodyLen) {
+		// Read the response in stream mode.
+		sr, err := sw.GetStreamReader()
+		if err != nil {
+			return nil, err
+		}
+		data, err := ioutil.ReadAll(sr)
+		sr.MustClose()
+		return data, err
+	}
+	// Read the response in usual mode.
+	return sw.ReadData(nil)
+}
+
 func (sw *scrapeWork) scrapeInternal(scrapeTimestamp, realTimestamp int64) error {
 	if *streamParse || sw.Config.StreamParse || sw.mustSwitchToStreamParseMode(sw.prevBodyLen) {
 		// Read data from scrape targets in streaming manner.
@@ -455,7 +472,7 @@ func (sw *scrapeWork) scrapeInternal(scrapeTimestamp, realTimestamp int64) error
 		// This should reduce memory usage when scraping targets which return big responses.
 		leveledbytebufferpool.Put(body)
 	}
-	tsmGlobal.Update(sw.Config, sw.ScrapeGroup, up == 1, realTimestamp, int64(duration*1000), samplesScraped, err)
+	tsmGlobal.Update(sw, sw.ScrapeGroup, up == 1, realTimestamp, int64(duration*1000), samplesScraped, err)
 	return err
 }
 
@@ -558,7 +575,7 @@ func (sw *scrapeWork) scrapeStream(scrapeTimestamp, realTimestamp int64) error {
 		sw.storeLastScrape(sbr.body)
 	}
 	sw.finalizeLastScrape()
-	tsmGlobal.Update(sw.Config, sw.ScrapeGroup, up == 1, realTimestamp, int64(duration*1000), samplesScraped, err)
+	tsmGlobal.Update(sw, sw.ScrapeGroup, up == 1, realTimestamp, int64(duration*1000), samplesScraped, err)
 	// Do not track active series in streaming mode, since this may need too big amounts of memory
 	// when the target exports too big number of metrics.
 	return err

@@ -76,7 +76,9 @@ job={%q= jobName %} (0/0 up)
 {% for j, ts := range js.targetsStatus %}
 {% if onlyUnhealthy && ts.up %}{% continue %}{% endif %}
 <tr {% if !ts.up %}{%space%}class="alert alert-danger" role="alert"{% endif %}>
-    <td><a href="{%s ts.endpoint %}">{%s ts.endpoint %}</a><br></td>
+    <td><a href="{%s ts.endpoint %}">{%s ts.endpoint %}</a> (
+    <a href="target_response?id={%s ts.targetID %}" target="_blank">fetch response</a>
+    )</td>
 <td>{% if ts.up %}UP{% else %}DOWN{% endif %}</td>
 <td>
     <button type="button" class="btn btn-sm btn-outline-info" onclick="document.getElementById('original_labels_{%d i %}_{%d j %}').style.display='block'">show original labels</button>{% space %}

@@ -265,167 +265,171 @@ func StreamTargetsResponseHTML(qw422016 *qt422016.Writer, jts []jobTargetsStatus
//line lib/promscrape/targets_response.qtpl:79
qw422016.E().S(ts.endpoint)
//line lib/promscrape/targets_response.qtpl:79
qw422016.N().S(`</a><br></td><td>`)
qw422016.N().S(`</a> (<a href="target_response?id=`)
//line lib/promscrape/targets_response.qtpl:80
qw422016.E().S(ts.targetID)
//line lib/promscrape/targets_response.qtpl:80
qw422016.N().S(`" target="_blank">fetch response</a>)</td><td>`)
//line lib/promscrape/targets_response.qtpl:82
if ts.up {
//line lib/promscrape/targets_response.qtpl:80
//line lib/promscrape/targets_response.qtpl:82
qw422016.N().S(`UP`)
//line lib/promscrape/targets_response.qtpl:80
//line lib/promscrape/targets_response.qtpl:82
} else {
//line lib/promscrape/targets_response.qtpl:80
//line lib/promscrape/targets_response.qtpl:82
qw422016.N().S(`DOWN`)
//line lib/promscrape/targets_response.qtpl:80
//line lib/promscrape/targets_response.qtpl:82
}
//line lib/promscrape/targets_response.qtpl:80
//line lib/promscrape/targets_response.qtpl:82
qw422016.N().S(`</td><td><button type="button" class="btn btn-sm btn-outline-info" onclick="document.getElementById('original_labels_`)
//line lib/promscrape/targets_response.qtpl:82
//line lib/promscrape/targets_response.qtpl:84
qw422016.N().D(i)
//line lib/promscrape/targets_response.qtpl:82
//line lib/promscrape/targets_response.qtpl:84
qw422016.N().S(`_`)
//line lib/promscrape/targets_response.qtpl:82
//line lib/promscrape/targets_response.qtpl:84
qw422016.N().D(j)
//line lib/promscrape/targets_response.qtpl:82
//line lib/promscrape/targets_response.qtpl:84
qw422016.N().S(`').style.display='block'">show original labels</button>`)
//line lib/promscrape/targets_response.qtpl:82
//line lib/promscrape/targets_response.qtpl:84
qw422016.N().S(` `)
//line lib/promscrape/targets_response.qtpl:83
//line lib/promscrape/targets_response.qtpl:85
streamformatLabel(qw422016, ts.labels)
//line lib/promscrape/targets_response.qtpl:83
//line lib/promscrape/targets_response.qtpl:85
qw422016.N().S(`<div style="display:none" id="original_labels_`)
//line lib/promscrape/targets_response.qtpl:84
//line lib/promscrape/targets_response.qtpl:86
qw422016.N().D(i)
//line lib/promscrape/targets_response.qtpl:84
//line lib/promscrape/targets_response.qtpl:86
qw422016.N().S(`_`)
//line lib/promscrape/targets_response.qtpl:84
//line lib/promscrape/targets_response.qtpl:86
qw422016.N().D(j)
//line lib/promscrape/targets_response.qtpl:84
//line lib/promscrape/targets_response.qtpl:86
qw422016.N().S(`"><button type="button" class="btn btn-sm btn-outline-info" onclick="document.getElementById('original_labels_`)
//line lib/promscrape/targets_response.qtpl:85
//line lib/promscrape/targets_response.qtpl:87
qw422016.N().D(i)
//line lib/promscrape/targets_response.qtpl:85
//line lib/promscrape/targets_response.qtpl:87
qw422016.N().S(`_`)
//line lib/promscrape/targets_response.qtpl:85
//line lib/promscrape/targets_response.qtpl:87
qw422016.N().D(j)
//line lib/promscrape/targets_response.qtpl:85
//line lib/promscrape/targets_response.qtpl:87
qw422016.N().S(`').style.display='none'">hide original labels</button>`)
//line lib/promscrape/targets_response.qtpl:85
//line lib/promscrape/targets_response.qtpl:87
qw422016.N().S(` `)
//line lib/promscrape/targets_response.qtpl:86
//line lib/promscrape/targets_response.qtpl:88
streamformatLabel(qw422016, ts.originalLabels)
//line lib/promscrape/targets_response.qtpl:86
//line lib/promscrape/targets_response.qtpl:88
qw422016.N().S(`</div></td><td>`)
//line lib/promscrape/targets_response.qtpl:89
//line lib/promscrape/targets_response.qtpl:91
qw422016.N().FPrec(ts.lastScrapeTime.Seconds(), 3)
//line lib/promscrape/targets_response.qtpl:89
//line lib/promscrape/targets_response.qtpl:91
qw422016.N().S(`s ago</td><td>`)
//line lib/promscrape/targets_response.qtpl:90
//line lib/promscrape/targets_response.qtpl:92
qw422016.N().FPrec(ts.scrapeDuration.Seconds(), 3)
//line lib/promscrape/targets_response.qtpl:90
//line lib/promscrape/targets_response.qtpl:92
qw422016.N().S(`s</td><td>`)
//line lib/promscrape/targets_response.qtpl:91
//line lib/promscrape/targets_response.qtpl:93
qw422016.N().D(ts.samplesScraped)
//line lib/promscrape/targets_response.qtpl:91
//line lib/promscrape/targets_response.qtpl:93
qw422016.N().S(`</td><td>`)
//line lib/promscrape/targets_response.qtpl:92
//line lib/promscrape/targets_response.qtpl:94
qw422016.E().S(ts.errMsg)
//line lib/promscrape/targets_response.qtpl:92
//line lib/promscrape/targets_response.qtpl:94
qw422016.N().S(`</td></tr>`)
//line lib/promscrape/targets_response.qtpl:94
//line lib/promscrape/targets_response.qtpl:96
}
//line lib/promscrape/targets_response.qtpl:94
//line lib/promscrape/targets_response.qtpl:96
qw422016.N().S(`</tbody></table></div></div>`)
//line lib/promscrape/targets_response.qtpl:99
}
//line lib/promscrape/targets_response.qtpl:101
}
//line lib/promscrape/targets_response.qtpl:103
for _, jobName := range emptyJobs {
//line lib/promscrape/targets_response.qtpl:101
//line lib/promscrape/targets_response.qtpl:103
qw422016.N().S(`<div><h4><a>`)
//line lib/promscrape/targets_response.qtpl:104
//line lib/promscrape/targets_response.qtpl:106
qw422016.E().S(jobName)
//line lib/promscrape/targets_response.qtpl:104
//line lib/promscrape/targets_response.qtpl:106
qw422016.N().S(`(0/0 up)</a></h4><table class="table table-striped table-hover table-bordered table-sm"><thead><tr><th scope="col">Endpoint</th><th scope="col">State</th><th scope="col">Labels</th><th scope="col">Last Scrape</th><th scope="col">Scrape Duration</th><th scope="col">Samples Scraped</th><th scope="col">Error</th></tr></thead></table></div>`)
//line lib/promscrape/targets_response.qtpl:120
//line lib/promscrape/targets_response.qtpl:122
}
//line lib/promscrape/targets_response.qtpl:120
//line lib/promscrape/targets_response.qtpl:122
qw422016.N().S(`</body></html>`)
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
}
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
func WriteTargetsResponseHTML(qq422016 qtio422016.Writer, jts []jobTargetsStatuses, emptyJobs []string, onlyUnhealthy bool) {
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
qw422016 := qt422016.AcquireWriter(qq422016)
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
StreamTargetsResponseHTML(qw422016, jts, emptyJobs, onlyUnhealthy)
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
qt422016.ReleaseWriter(qw422016)
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
}
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
func TargetsResponseHTML(jts []jobTargetsStatuses, emptyJobs []string, onlyUnhealthy bool) string {
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
qb422016 := qt422016.AcquireByteBuffer()
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
WriteTargetsResponseHTML(qb422016, jts, emptyJobs, onlyUnhealthy)
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
qs422016 := string(qb422016.B)
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
qt422016.ReleaseByteBuffer(qb422016)
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
return qs422016
//line lib/promscrape/targets_response.qtpl:123
//line lib/promscrape/targets_response.qtpl:125
}
//line lib/promscrape/targets_response.qtpl:125
func streamformatLabel(qw422016 *qt422016.Writer, labels []prompbmarshal.Label) {
//line lib/promscrape/targets_response.qtpl:125
qw422016.N().S(`{`)
//line lib/promscrape/targets_response.qtpl:127
func streamformatLabel(qw422016 *qt422016.Writer, labels []prompbmarshal.Label) {
//line lib/promscrape/targets_response.qtpl:127
qw422016.N().S(`{`)
//line lib/promscrape/targets_response.qtpl:129
for i, label := range labels {
//line lib/promscrape/targets_response.qtpl:128
//line lib/promscrape/targets_response.qtpl:130
qw422016.E().S(label.Name)
//line lib/promscrape/targets_response.qtpl:128
//line lib/promscrape/targets_response.qtpl:130
qw422016.N().S(`=`)
//line lib/promscrape/targets_response.qtpl:128
//line lib/promscrape/targets_response.qtpl:130
qw422016.E().Q(label.Value)
//line lib/promscrape/targets_response.qtpl:129
//line lib/promscrape/targets_response.qtpl:131
if i+1 < len(labels) {
//line lib/promscrape/targets_response.qtpl:129
//line lib/promscrape/targets_response.qtpl:131
qw422016.N().S(`,`)
//line lib/promscrape/targets_response.qtpl:129
//line lib/promscrape/targets_response.qtpl:131
qw422016.N().S(` `)
//line lib/promscrape/targets_response.qtpl:129
//line lib/promscrape/targets_response.qtpl:131
}
//line lib/promscrape/targets_response.qtpl:130
//line lib/promscrape/targets_response.qtpl:132
}
//line lib/promscrape/targets_response.qtpl:130
//line lib/promscrape/targets_response.qtpl:132
qw422016.N().S(`}`)
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
}
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
func writeformatLabel(qq422016 qtio422016.Writer, labels []prompbmarshal.Label) {
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
qw422016 := qt422016.AcquireWriter(qq422016)
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
streamformatLabel(qw422016, labels)
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
qt422016.ReleaseWriter(qw422016)
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
}
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
func formatLabel(labels []prompbmarshal.Label) string {
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
qb422016 := qt422016.AcquireByteBuffer()
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
writeformatLabel(qb422016, labels)
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
qs422016 := string(qb422016.B)
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
qt422016.ReleaseByteBuffer(qb422016)
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
return qs422016
//line lib/promscrape/targets_response.qtpl:132
//line lib/promscrape/targets_response.qtpl:134
}

@@ -10,6 +10,7 @@ import (
 	"strings"
 	"sync"
 	"time"
+	"unsafe"
 
 	"github.com/VictoriaMetrics/VictoriaMetrics/lib/fasttime"
 	"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
@@ -22,6 +23,24 @@ var maxDroppedTargets = flag.Int("promscrape.maxDroppedTargets", 1000, "The maxi
 
 var tsmGlobal = newTargetStatusMap()
 
+// WriteTargetResponse serves requests to /target_response?id=<id>
+//
+// It fetches response for the given target id and returns it.
+func WriteTargetResponse(w http.ResponseWriter, r *http.Request) error {
+	targetID := r.FormValue("id")
+	sw := tsmGlobal.getScrapeWorkByTargetID(targetID)
+	if sw == nil {
+		return fmt.Errorf("cannot find target for id=%s", targetID)
+	}
+	data, err := sw.getTargetResponse()
+	if err != nil {
+		return fmt.Errorf("cannot fetch response from id=%s: %w", targetID, err)
+	}
+	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+	_, err = w.Write(data)
+	return err
+}
+
 // WriteHumanReadableTargetsStatus writes human-readable status for all the scrape targets to w according to r.
 func WriteHumanReadableTargetsStatus(w http.ResponseWriter, r *http.Request) {
 	showOriginalLabels, _ := strconv.ParseBool(r.FormValue("show_original_labels"))
@@ -57,19 +76,19 @@ func WriteAPIV1Targets(w io.Writer, state string) {
 
 type targetStatusMap struct {
 	mu sync.Mutex
-	m map[*ScrapeWork]*targetStatus
+	m map[*scrapeWork]*targetStatus
 	jobNames []string
 }
 
 func newTargetStatusMap() *targetStatusMap {
 	return &targetStatusMap{
-		m: make(map[*ScrapeWork]*targetStatus),
+		m: make(map[*scrapeWork]*targetStatus),
 	}
 }
 
 func (tsm *targetStatusMap) Reset() {
 	tsm.mu.Lock()
-	tsm.m = make(map[*ScrapeWork]*targetStatus)
+	tsm.m = make(map[*scrapeWork]*targetStatus)
 	tsm.mu.Unlock()
 }
 
@@ -79,7 +98,7 @@ func (tsm *targetStatusMap) registerJobNames(jobNames []string) {
 	tsm.mu.Unlock()
 }
 
-func (tsm *targetStatusMap) Register(sw *ScrapeWork) {
+func (tsm *targetStatusMap) Register(sw *scrapeWork) {
 	tsm.mu.Lock()
 	tsm.m[sw] = &targetStatus{
 		sw: sw,
@@ -87,13 +106,13 @@ func (tsm *targetStatusMap) Register(sw *ScrapeWork) {
 	tsm.mu.Unlock()
 }
 
-func (tsm *targetStatusMap) Unregister(sw *ScrapeWork) {
+func (tsm *targetStatusMap) Unregister(sw *scrapeWork) {
 	tsm.mu.Lock()
 	delete(tsm.m, sw)
 	tsm.mu.Unlock()
 }
 
-func (tsm *targetStatusMap) Update(sw *ScrapeWork, group string, up bool, scrapeTime, scrapeDuration int64, samplesScraped int, err error) {
+func (tsm *targetStatusMap) Update(sw *scrapeWork, group string, up bool, scrapeTime, scrapeDuration int64, samplesScraped int, err error) {
 	tsm.mu.Lock()
 	ts := tsm.m[sw]
 	if ts == nil {
@@ -111,6 +130,21 @@ func (tsm *targetStatusMap) Update(sw *ScrapeWork, group string, up bool, scrape
 	tsm.mu.Unlock()
 }
 
+func (tsm *targetStatusMap) getScrapeWorkByTargetID(targetID string) *scrapeWork {
+	tsm.mu.Lock()
+	defer tsm.mu.Unlock()
+	for sw := range tsm.m {
+		if getTargetID(sw) == targetID {
+			return sw
+		}
+	}
+	return nil
+}
+
+func getTargetID(sw *scrapeWork) string {
+	return fmt.Sprintf("%016x", uintptr(unsafe.Pointer(sw)))
+}
+
 // StatusByGroup returns the number of targets with status==up
 // for the given group name
 func (tsm *targetStatusMap) StatusByGroup(group string, up bool) int {
@@ -134,7 +168,7 @@ func (tsm *targetStatusMap) WriteActiveTargetsJSON(w io.Writer) {
 	}
 	kss := make([]keyStatus, 0, len(tsm.m))
 	for sw, st := range tsm.m {
-		key := promLabelsString(sw.OriginalLabels)
+		key := promLabelsString(sw.Config.OriginalLabels)
 		kss = append(kss, keyStatus{
 			key: key,
 			st: *st,
@@ -149,12 +183,12 @@ func (tsm *targetStatusMap) WriteActiveTargetsJSON(w io.Writer) {
 	for i, ks := range kss {
 		st := ks.st
 		fmt.Fprintf(w, `{"discoveredLabels":`)
-		writeLabelsJSON(w, st.sw.OriginalLabels)
+		writeLabelsJSON(w, st.sw.Config.OriginalLabels)
 		fmt.Fprintf(w, `,"labels":`)
-		labelsFinalized := promrelabel.FinalizeLabels(nil, st.sw.Labels)
+		labelsFinalized := promrelabel.FinalizeLabels(nil, st.sw.Config.Labels)
 		writeLabelsJSON(w, labelsFinalized)
-		fmt.Fprintf(w, `,"scrapePool":%q`, st.sw.Job())
-		fmt.Fprintf(w, `,"scrapeUrl":%q`, st.sw.ScrapeURL)
+		fmt.Fprintf(w, `,"scrapePool":%q`, st.sw.Config.Job())
+		fmt.Fprintf(w, `,"scrapeUrl":%q`, st.sw.Config.ScrapeURL)
 		errMsg := ""
 		if st.err != nil {
 			errMsg = st.err.Error()
@@ -187,7 +221,7 @@ func writeLabelsJSON(w io.Writer, labels []prompbmarshal.Label) {
 }
 
 type targetStatus struct {
-	sw *ScrapeWork
+	sw *scrapeWork
 	up bool
 	scrapeGroup string
 	scrapeTime int64
@@ -274,6 +308,7 @@ var droppedTargetsMap = &droppedTargets{
 type jobTargetStatus struct {
 	up bool
 	endpoint string
+	targetID string
 	labels []prompbmarshal.Label
 	originalLabels []prompbmarshal.Label
 	lastScrapeTime time.Duration
@@ -293,7 +328,7 @@ func (tsm *targetStatusMap) getTargetsStatusByJob() ([]jobTargetsStatuses, []str
 	byJob := make(map[string][]targetStatus)
 	tsm.mu.Lock()
 	for _, st := range tsm.m {
-		job := st.sw.jobNameOriginal
+		job := st.sw.Config.jobNameOriginal
 		byJob[job] = append(byJob[job], *st)
 	}
 	jobNames := append([]string{}, tsm.jobNames...)
@@ -302,7 +337,7 @@ func (tsm *targetStatusMap) getTargetsStatusByJob() ([]jobTargetsStatuses, []str
 	var jts []jobTargetsStatuses
 	for job, statuses := range byJob {
 		sort.Slice(statuses, func(i, j int) bool {
-			return statuses[i].sw.ScrapeURL < statuses[j].sw.ScrapeURL
+			return statuses[i].sw.Config.ScrapeURL < statuses[j].sw.Config.ScrapeURL
 		})
 		ups := 0
 		var targetsStatuses []jobTargetStatus
@@ -318,9 +353,10 @@ func (tsm *targetStatusMap) getTargetsStatusByJob() ([]jobTargetsStatuses, []str
 			}
 			targetsStatuses = append(targetsStatuses, jobTargetStatus{
 				up: st.up,
-				endpoint: st.sw.ScrapeURL,
-				labels: promrelabel.FinalizeLabels(nil, st.sw.Labels),
-				originalLabels: st.sw.OriginalLabels,
+				endpoint: st.sw.Config.ScrapeURL,
+				targetID: getTargetID(st.sw),
+				labels: promrelabel.FinalizeLabels(nil, st.sw.Config.Labels),
+				originalLabels: st.sw.Config.OriginalLabels,
				lastScrapeTime: st.getDurationFromLastScrape(),
 				scrapeDuration: time.Duration(st.scrapeDuration) * time.Millisecond,
 				samplesScraped: st.samplesScraped,
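A closing note on the target id scheme (an illustrative sketch, not code from this commit): the id used by `/target_response` and the `fetch response` link is simply the hex-encoded address of the in-memory scrapeWork object, so it only resolves while that object is registered in the target status map. The struct below is a hypothetical stand-in, not the actual promscrape type.

package main

import (
	"fmt"
	"unsafe"
)

// scrapeWork is a stand-in struct for illustration only.
type scrapeWork struct {
	scrapeURL string
}

// getTargetID mirrors the scheme used in the commit: the hex-encoded
// pointer value of the in-memory scrapeWork object.
func getTargetID(sw *scrapeWork) string {
	return fmt.Sprintf("%016x", uintptr(unsafe.Pointer(sw)))
}

func main() {
	sw := &scrapeWork{scrapeURL: "http://localhost:8080/metrics"}
	// The id changes whenever the target is re-registered, since it is
	// derived from a pointer value rather than from the target labels.
	fmt.Println(getTargetID(sw))
}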