lib/promscrape: use target arg in ScrapeWork cache

Aliaksandr Valialkin 2021-03-01 12:29:09 +02:00
parent 402543e7c6
commit 8af9370bf2
2 changed files with 12 additions and 4 deletions
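
The change below touches the ScrapeWork dedup cache in lib/promscrape: getScrapeWork builds a cache key from the serialized extraLabels and metaLabels, and before this commit the scrape target itself was not part of that key, so two different targets carrying identical extraLabels and metaLabels collided on the same cache entry. The following is a minimal standalone sketch of that collision and of the fix, not the VictoriaMetrics code itself; the simplified label serialization and the example target addresses are assumptions made only for illustration.

// Minimal standalone sketch of the dedup-key collision fixed by this commit.
// This is NOT the VictoriaMetrics code: the label serialization below is a
// simplified assumption about appendScrapeWorkKey/appendSortedKeyValuePairs,
// and the target addresses are made up for illustration.
package main

import (
	"fmt"
	"sort"
)

// appendSortedLabels appends labels as "k=v," pairs in sorted key order,
// so map iteration order cannot change the resulting key.
func appendSortedLabels(dst []byte, labels map[string]string) []byte {
	keys := make([]string, 0, len(labels))
	for k := range labels {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		dst = append(dst, k...)
		dst = append(dst, '=')
		dst = append(dst, labels[k]...)
		dst = append(dst, ',')
	}
	return dst
}

// keyWithoutTarget mimics the old behavior: only the label sets form the key.
func keyWithoutTarget(extraLabels, metaLabels map[string]string) string {
	var b []byte
	b = appendSortedLabels(b, extraLabels)
	b = appendSortedLabels(b, metaLabels)
	return string(b)
}

// keyWithTarget mimics the fixed behavior: the scrape target is prepended
// and separated from the label pairs by ','.
func keyWithTarget(target string, extraLabels, metaLabels map[string]string) string {
	b := append([]byte(target), ',')
	b = appendSortedLabels(b, extraLabels)
	b = appendSortedLabels(b, metaLabels)
	return string(b)
}

func main() {
	extra := map[string]string{"job": "node"}
	meta := map[string]string{"__meta_dc": "eu"}

	// Two different scrape targets that happen to carry identical label sets.
	a, b := "10.0.0.1:9100", "10.0.0.2:9100"

	// The old key ignores the target, so both targets collide on one cache entry.
	fmt.Printf("old key for %s and %s: %q\n", a, b, keyWithoutTarget(extra, meta))
	// With the target in the key, the two targets stay distinct.
	fmt.Printf("new key for %s: %q\n", a, keyWithTarget(a, extra, meta))
	fmt.Printf("new key for %s: %q\n", b, keyWithTarget(b, extra, meta))
}

Prepending the target plus a ',' separator before the sorted label pairs is what the first file's diff below adds.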

lib/promscrape/config.go

@@ -728,7 +728,7 @@ func (stc *StaticConfig) appendScrapeWork(dst []*ScrapeWork, swc *scrapeWorkConf
 func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabels map[string]string) (*ScrapeWork, error) {
 	bb := scrapeWorkKeyBufPool.Get()
 	defer scrapeWorkKeyBufPool.Put(bb)
-	bb.B = appendScrapeWorkKey(bb.B[:0], extraLabels, metaLabels)
+	bb.B = appendScrapeWorkKey(bb.B[:0], target, extraLabels, metaLabels)
 	keyStrUnsafe := bytesutil.ToUnsafeString(bb.B)
 	if needSkipScrapeWork(keyStrUnsafe) {
 		return nil, nil
@@ -745,7 +745,9 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 
 var scrapeWorkKeyBufPool bytesutil.ByteBufferPool
 
-func appendScrapeWorkKey(dst []byte, extraLabels, metaLabels map[string]string) []byte {
+func appendScrapeWorkKey(dst []byte, target string, extraLabels, metaLabels map[string]string) []byte {
+	dst = append(dst, target...)
+	dst = append(dst, ',')
 	dst = appendSortedKeyValuePairs(dst, extraLabels)
 	dst = appendSortedKeyValuePairs(dst, metaLabels)
 	return dst

lib/promscrape/config_test.go

@@ -4,6 +4,7 @@ import (
 	"crypto/tls"
 	"fmt"
 	"reflect"
+	"strconv"
 	"testing"
 	"time"
 
@@ -444,6 +445,11 @@ func resetNonEssentialFields(sws []*ScrapeWork) {
 	}
 }
 
+// String returns human-readable representation for sw.
+func (sw *ScrapeWork) String() string {
+	return strconv.Quote(sw.key())
+}
+
 func TestGetFileSDScrapeWorkSuccess(t *testing.T) {
 	f := func(data string, expectedSws []*ScrapeWork) {
 		t.Helper()
@@ -463,7 +469,7 @@ func TestGetFileSDScrapeWorkSuccess(t *testing.T) {
 			}
 		}
 		if !reflect.DeepEqual(sws, expectedSws) {
-			t.Fatalf("unexpected scrapeWork; got\n%v\nwant\n%v", sws, expectedSws)
+			t.Fatalf("unexpected scrapeWork; got\n%+v\nwant\n%+v", sws, expectedSws)
 		}
 	}
 	f(`
@@ -608,7 +614,7 @@ func TestGetStaticScrapeWorkSuccess(t *testing.T) {
 		}
 		resetNonEssentialFields(sws)
 		if !reflect.DeepEqual(sws, expectedSws) {
-			t.Fatalf("unexpected scrapeWork; got\n%v\nwant\n%v", sws, expectedSws)
+			t.Fatalf("unexpected scrapeWork; got\n%+v\nwant\n%+v", sws, expectedSws)
 		}
 	}
 	f(``, nil)
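
The String method added in the test file makes failing comparisons readable: fmt invokes a type's String() method for %v and %+v, so t.Fatalf prints the quoted key of each *ScrapeWork instead of dumping every struct field. Below is a rough sketch of that pattern; the ScrapeWork struct and its key() method are placeholders invented for the example, and only the String/strconv.Quote wiring mirrors the diff.

// Rough sketch of the fmt.Stringer pattern used in the test change above.
// The ScrapeWork struct and key() method here are placeholders, not the real ones.
package main

import (
	"fmt"
	"strconv"
)

type ScrapeWork struct {
	ScrapeURL string
	Job       string
}

// key returns a short identifier for sw; the real method serializes far more fields.
func (sw *ScrapeWork) key() string {
	return sw.ScrapeURL + "," + sw.Job
}

// String makes %v and %+v print the quoted key instead of the raw struct fields,
// which keeps test-failure output short and easy to compare by eye.
func (sw *ScrapeWork) String() string {
	return strconv.Quote(sw.key())
}

func main() {
	sws := []*ScrapeWork{
		{ScrapeURL: "http://10.0.0.1:9100/metrics", Job: "node"},
		{ScrapeURL: "http://10.0.0.2:9100/metrics", Job: "node"},
	}
	// Each element is rendered via String(), so the output shows quoted keys
	// rather than struct field dumps.
	fmt.Printf("%+v\n", sws)
}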