From 257521a634e7a9200d0ad93c4ce824301632b4db Mon Sep 17 00:00:00 2001
From: Aliaksandr Valialkin <valyala@gmail.com>
Date: Tue, 14 Apr 2020 13:08:48 +0300
Subject: [PATCH] lib/promscrape: properly expose statuses for targets with
 duplicate scrape URLs on the `/targets` page

Previously, targets with duplicate scrape URLs were merged into a single line on the page.
Now each target with a duplicate scrape URL is displayed on its own line.
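
The mechanics, as a minimal standalone sketch (nextID, target and statusMap
below are illustrative stand-ins, not the actual ScrapeWork/targetStatusMap
code from this patch): every registered target receives a unique ID from an
atomic counter, and the status map is keyed by that ID rather than by the
scrape URL, so two targets scraping the same URL keep separate entries.

    package main

    import (
        "fmt"
        "sync"
        "sync/atomic"
    )

    // nextID mimics nextScrapeWorkID: a process-wide counter that hands out
    // a unique ID to every registered target.
    var nextID uint64

    type target struct {
        id  uint64 // unique per target, even when the scrape URL collides
        url string
    }

    type statusMap struct {
        mu sync.Mutex
        m  map[uint64]string // keyed by id instead of url
    }

    func (sm *statusMap) register(t *target) {
        sm.mu.Lock()
        sm.m[t.id] = "registered " + t.url
        sm.mu.Unlock()
    }

    func main() {
        sm := &statusMap{m: make(map[uint64]string)}
        // Two targets with the same scrape URL no longer overwrite each
        // other's map entry, because their IDs differ.
        for i := 0; i < 2; i++ {
            t := &target{
                id:  atomic.AddUint64(&nextID, 1),
                url: "http://host:9100/metrics",
            }
            sm.register(t)
        }
        fmt.Println(len(sm.m)) // 2: one status line per duplicate target
    }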
---
 lib/promscrape/config.go       |  5 +++++
 lib/promscrape/scrapework.go   |  3 +++
 lib/promscrape/targetstatus.go | 12 ++++++------
 3 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/lib/promscrape/config.go b/lib/promscrape/config.go
index d4117cbe09..6aac57a406 100644
--- a/lib/promscrape/config.go
+++ b/lib/promscrape/config.go
@@ -7,6 +7,7 @@ import (
 	"net/url"
 	"path/filepath"
 	"strings"
+	"sync/atomic"
 	"time"
 
 	"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
@@ -474,6 +475,7 @@ func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, ex
 			scrapeURL, swc.scheme, schemeRelabeled, target, targetRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
 	}
 	dst = append(dst, ScrapeWork{
+		ID:                   atomic.AddUint64(&nextScrapeWorkID, 1),
 		ScrapeURL:            scrapeURL,
 		ScrapeInterval:       swc.scrapeInterval,
 		ScrapeTimeout:        swc.scrapeTimeout,
@@ -487,6 +489,9 @@ func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, ex
 	return dst, nil
 }
 
+// Each ScrapeWork has an ID, which is used for locating it when updating its status.
+var nextScrapeWorkID uint64
+
 func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]string) map[string][]string {
 	// See https://www.robustperception.io/life-of-a-label
 	m := make(map[string][]string)
diff --git a/lib/promscrape/scrapework.go b/lib/promscrape/scrapework.go
index 8371270338..28b28530fc 100644
--- a/lib/promscrape/scrapework.go
+++ b/lib/promscrape/scrapework.go
@@ -15,6 +15,9 @@ import (
 
 // ScrapeWork represents a unit of work for scraping Prometheus metrics.
 type ScrapeWork struct {
+	// Unique ID for the ScrapeWork.
+	ID uint64
+
 	// Full URL (including query args) for the scrape.
 	ScrapeURL string
 
diff --git a/lib/promscrape/targetstatus.go b/lib/promscrape/targetstatus.go
index 390ab355e6..0c37590064 100644
--- a/lib/promscrape/targetstatus.go
+++ b/lib/promscrape/targetstatus.go
@@ -20,18 +20,18 @@ func WriteHumanReadableTargetsStatus(w io.Writer) {
 
 type targetStatusMap struct {
 	mu sync.Mutex
-	m  map[string]targetStatus
+	m  map[uint64]targetStatus
 }
 
 func newTargetStatusMap() *targetStatusMap {
 	return &targetStatusMap{
-		m: make(map[string]targetStatus),
+		m: make(map[uint64]targetStatus),
 	}
 }
 
 func (tsm *targetStatusMap) Reset() {
 	tsm.mu.Lock()
-	tsm.m = make(map[string]targetStatus)
+	tsm.m = make(map[uint64]targetStatus)
 	tsm.mu.Unlock()
 }
 
@@ -39,7 +39,7 @@ func (tsm *targetStatusMap) RegisterAll(sws []ScrapeWork) {
 	tsm.mu.Lock()
 	for i := range sws {
 		sw := &sws[i]
-		tsm.m[sw.ScrapeURL] = targetStatus{
+		tsm.m[sw.ID] = targetStatus{
 			sw: sw,
 		}
 	}
@@ -49,14 +49,14 @@ func (tsm *targetStatusMap) RegisterAll(sws []ScrapeWork) {
 func (tsm *targetStatusMap) UnregisterAll(sws []ScrapeWork) {
 	tsm.mu.Lock()
 	for i := range sws {
-		delete(tsm.m, sws[i].ScrapeURL)
+		delete(tsm.m, sws[i].ID)
 	}
 	tsm.mu.Unlock()
 }
 
 func (tsm *targetStatusMap) Update(sw *ScrapeWork, up bool, scrapeTime, scrapeDuration int64, err error) {
 	tsm.mu.Lock()
-	tsm.m[sw.ScrapeURL] = targetStatus{
+	tsm.m[sw.ID] = targetStatus{
 		sw:             sw,
 		up:             up,
 		scrapeTime:     scrapeTime,