lib/promscrape: properly expose statuses for targets with duplicate scrape URLs on the /targets page

Previously, targets with duplicate scrape URLs were merged into a single line on the page.
Now each target with a duplicate scrape URL is displayed on its own line.
Aliaksandr Valialkin 2020-04-14 13:08:48 +03:00
parent 09f796e2ab
commit 88366cad15
3 changed files with 14 additions and 6 deletions
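
The core of the change: the per-target status map behind the /targets page was keyed by scrape URL, so two discovered targets that end up with the same URL overwrote each other's entry and showed up as a single line. Keying the map by a unique per-ScrapeWork ID keeps one entry per target. Below is a minimal standalone sketch of the idea in Go, using simplified stand-in types rather than the actual promscrape structures:

package main

import "fmt"

// simplifiedTarget is a stand-in for ScrapeWork: just a unique ID and a scrape URL.
type simplifiedTarget struct {
    ID        uint64
    ScrapeURL string
}

func main() {
    targets := []simplifiedTarget{
        {ID: 1, ScrapeURL: "http://10.0.0.1:8080/metrics"},
        {ID: 2, ScrapeURL: "http://10.0.0.1:8080/metrics"}, // duplicate scrape URL
    }

    // Old behavior: keyed by URL, the second target overwrites the first.
    byURL := make(map[string]simplifiedTarget)
    for _, t := range targets {
        byURL[t.ScrapeURL] = t
    }
    fmt.Println("entries keyed by URL:", len(byURL)) // 1

    // New behavior: keyed by ID, both targets keep their own status entry.
    byID := make(map[uint64]simplifiedTarget)
    for _, t := range targets {
        byID[t.ID] = t
    }
    fmt.Println("entries keyed by ID:", len(byID)) // 2
}

Run as-is, this prints one entry for the URL-keyed map and two for the ID-keyed one, which is exactly the difference visible on the /targets page before and after this commit.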

View file

@@ -7,6 +7,7 @@ import (
     "net/url"
     "path/filepath"
     "strings"
+    "sync/atomic"
     "time"

     "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
@@ -474,6 +475,7 @@ func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, ex
             scrapeURL, swc.scheme, schemeRelabeled, target, targetRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
     }
     dst = append(dst, ScrapeWork{
+        ID: atomic.AddUint64(&nextScrapeWorkID, 1),
         ScrapeURL: scrapeURL,
         ScrapeInterval: swc.scrapeInterval,
         ScrapeTimeout: swc.scrapeTimeout,
@@ -487,6 +489,9 @@ func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, ex
     return dst, nil
 }

+// Each ScrapeWork has an ID, which is used for locating it when updating its status.
+var nextScrapeWorkID uint64
+
 func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]string) map[string][]string {
     // See https://www.robustperception.io/life-of-a-label
     m := make(map[string][]string)
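
The ID added above is drawn from the package-level counter nextScrapeWorkID via atomic.AddUint64, so ScrapeWork entries built concurrently still receive distinct IDs. A small self-contained sketch of the same counter pattern (the names nextID and newID below are illustrative, not actual promscrape symbols):

package main

import (
    "fmt"
    "sync"
    "sync/atomic"
)

// nextID is a package-level counter; atomic.AddUint64 returns the incremented
// value, so every caller observes a distinct, never-reused ID.
var nextID uint64

func newID() uint64 {
    return atomic.AddUint64(&nextID, 1)
}

func main() {
    var wg sync.WaitGroup
    ids := make([]uint64, 4)
    for i := range ids {
        wg.Add(1)
        go func(i int) {
            defer wg.Done()
            ids[i] = newID() // unique even when called concurrently
        }(i)
    }
    wg.Wait()
    fmt.Println(ids) // four distinct values from 1..4, in some order
}

atomic.AddUint64 returns the value after the increment, so the first ID handed out is 1 and zero can serve as an "unset" marker.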

View file

@@ -15,6 +15,9 @@ import (
 // ScrapeWork represents a unit of work for scraping Prometheus metrics.
 type ScrapeWork struct {
+    // Unique ID for the ScrapeWork.
+    ID uint64
+
     // Full URL (including query args) for the scrape.
     ScrapeURL string

View file

@@ -20,18 +20,18 @@ func WriteHumanReadableTargetsStatus(w io.Writer) {
 type targetStatusMap struct {
     mu sync.Mutex
-    m  map[string]targetStatus
+    m  map[uint64]targetStatus
 }

 func newTargetStatusMap() *targetStatusMap {
     return &targetStatusMap{
-        m: make(map[string]targetStatus),
+        m: make(map[uint64]targetStatus),
     }
 }

 func (tsm *targetStatusMap) Reset() {
     tsm.mu.Lock()
-    tsm.m = make(map[string]targetStatus)
+    tsm.m = make(map[uint64]targetStatus)
     tsm.mu.Unlock()
 }
@@ -39,7 +39,7 @@ func (tsm *targetStatusMap) RegisterAll(sws []ScrapeWork) {
     tsm.mu.Lock()
     for i := range sws {
         sw := &sws[i]
-        tsm.m[sw.ScrapeURL] = targetStatus{
+        tsm.m[sw.ID] = targetStatus{
             sw: sw,
         }
     }
@@ -49,14 +49,14 @@
 func (tsm *targetStatusMap) UnregisterAll(sws []ScrapeWork) {
     tsm.mu.Lock()
     for i := range sws {
-        delete(tsm.m, sws[i].ScrapeURL)
+        delete(tsm.m, sws[i].ID)
     }
     tsm.mu.Unlock()
 }

 func (tsm *targetStatusMap) Update(sw *ScrapeWork, up bool, scrapeTime, scrapeDuration int64, err error) {
     tsm.mu.Lock()
-    tsm.m[sw.ScrapeURL] = targetStatus{
+    tsm.m[sw.ID] = targetStatus{
         sw: sw,
         up: up,
         scrapeTime: scrapeTime,