lib/promscrape: consistently update /targets page after SIGHUP

This commit is contained in:
Aliaksandr Valialkin 2020-03-11 03:19:56 +02:00
parent 8939c19281
commit 187fd89c70
2 changed files with 28 additions and 6 deletions

View file

@@ -64,6 +64,8 @@ func runScraper(configFile string, pushData func(wr *prompbmarshal.WriteRequest)
if err != nil { if err != nil {
logger.Fatalf("cannot parse `file_sd_config` from %q: %s", configFile, err) logger.Fatalf("cannot parse `file_sd_config` from %q: %s", configFile, err)
} }
tsmGlobal.RegisterAll(swsStatic)
tsmGlobal.RegisterAll(swsFileSD)
mustStop := false mustStop := false
for !mustStop { for !mustStop {
@@ -98,9 +100,13 @@ func runScraper(configFile string, pushData func(wr *prompbmarshal.WriteRequest)
if err != nil { if err != nil {
logger.Errorf("cannot parse `file_sd_config` from %q: %s; continuing with the previous config", configFile, err) logger.Errorf("cannot parse `file_sd_config` from %q: %s; continuing with the previous config", configFile, err)
} }
tsmGlobal.UnregisterAll(swsStatic)
tsmGlobal.UnregisterAll(swsFileSD)
cfg = cfgNew cfg = cfgNew
swsStatic = swsStaticNew swsStatic = swsStaticNew
swsFileSD = swsFileSDNew swsFileSD = swsFileSDNew
tsmGlobal.RegisterAll(swsStatic)
tsmGlobal.RegisterAll(swsFileSD)
case <-globalStopCh: case <-globalStopCh:
mustStop = true mustStop = true
} }
@@ -160,7 +166,9 @@ func runFileSDScrapers(sws []ScrapeWork, cfg *Config, pushData func(wr *prompbma
goto waitForChans goto waitForChans
} }
logger.Infof("restarting scrapers for changed `file_sd_config` targets") logger.Infof("restarting scrapers for changed `file_sd_config` targets")
tsmGlobal.UnregisterAll(sws)
sws = swsNew sws = swsNew
tsmGlobal.RegisterAll(sws)
case <-stopCh: case <-stopCh:
mustStop = true mustStop = true
} }

View file

@@ -35,6 +35,25 @@ func (tsm *targetStatusMap) Reset() {
tsm.mu.Unlock() tsm.mu.Unlock()
} }
// RegisterAll adds every entry from sws to tsm, keyed by its ScrapeURL,
// so the targets become visible on the /targets page.
// Entries are stored as pointers into the sws slice, so the caller must
// keep sws alive and unmodified while the targets remain registered.
func (tsm *targetStatusMap) RegisterAll(sws []ScrapeWork) {
	tsm.mu.Lock()
	defer tsm.mu.Unlock()
	for idx := range sws {
		w := &sws[idx]
		tsm.m[w.ScrapeURL] = targetStatus{sw: w}
	}
}
// UnregisterAll removes every entry from sws out of tsm, keyed by ScrapeURL,
// so the targets disappear from the /targets page.
// Deleting a key that is absent from the map is a no-op, so it is safe to
// call this with entries that were never registered.
func (tsm *targetStatusMap) UnregisterAll(sws []ScrapeWork) {
	tsm.mu.Lock()
	defer tsm.mu.Unlock()
	for idx := range sws {
		delete(tsm.m, sws[idx].ScrapeURL)
	}
}
func (tsm *targetStatusMap) Update(sw *ScrapeWork, up bool, scrapeTime, scrapeDuration int64, err error) { func (tsm *targetStatusMap) Update(sw *ScrapeWork, up bool, scrapeTime, scrapeDuration int64, err error) {
tsm.mu.Lock() tsm.mu.Lock()
tsm.m[sw.ScrapeURL] = targetStatus{ tsm.m[sw.ScrapeURL] = targetStatus{
@ -50,12 +69,7 @@ func (tsm *targetStatusMap) Update(sw *ScrapeWork, up bool, scrapeTime, scrapeDu
func (tsm *targetStatusMap) WriteHumanReadable(w io.Writer) { func (tsm *targetStatusMap) WriteHumanReadable(w io.Writer) {
byJob := make(map[string][]targetStatus) byJob := make(map[string][]targetStatus)
tsm.mu.Lock() tsm.mu.Lock()
for k, st := range tsm.m { for _, st := range tsm.m {
if st.getDurationFromLastScrape() > 10*st.sw.ScrapeInterval {
// Remove obsolete targets
delete(tsm.m, k)
continue
}
job := "" job := ""
label := promrelabel.GetLabelByName(st.sw.Labels, "job") label := promrelabel.GetLabelByName(st.sw.Labels, "job")
if label != nil { if label != nil {