lib/promscrape: prevent memory leaks on -promscrape.config reload when only a small part of the scrape jobs is updated

This is a follow-up after 26b78ad707
Author: Aliaksandr Valialkin
Date:   2022-04-22 13:19:20 +03:00
Commit: 67b10896d2 (parent 8d0fb4d69d)

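The core of the change is a clone() helper (visible in the second hunk below): a ScrapeConfig is deep-copied by marshaling it to YAML and unmarshaling it back, so the copy owns all of its memory instead of sharing it with the buffer the config file was parsed from. The following is a minimal, self-contained sketch of the same round-trip pattern, assuming gopkg.in/yaml.v2 (the package that provides yaml.UnmarshalStrict); the ScrapeConfig fields here are invented for illustration and are not the real promscrape struct:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// ScrapeConfig is a stand-in for the real struct in lib/promscrape;
// the fields are invented for this example.
type ScrapeConfig struct {
	JobName string   `yaml:"job_name"`
	Targets []string `yaml:"targets,omitempty"`
}

// clone deep-copies sc by round-tripping it through YAML, so the copy
// shares no backing memory with the original.
func (sc *ScrapeConfig) clone() *ScrapeConfig {
	data, err := yaml.Marshal(sc)
	if err != nil {
		panic(fmt.Sprintf("BUG: cannot marshal scrape config: %s", err))
	}
	var scCopy ScrapeConfig
	if err := yaml.UnmarshalStrict(data, &scCopy); err != nil {
		panic(fmt.Sprintf("BUG: cannot unmarshal scrape config: %s", err))
	}
	return &scCopy
}

func main() {
	orig := &ScrapeConfig{JobName: "node", Targets: []string{"localhost:9100"}}
	cp := orig.clone()
	cp.Targets[0] = "changed:9100"
	fmt.Println(orig.Targets[0]) // prints "localhost:9100": the clone is independent
}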

@@ -166,6 +166,10 @@ func areEqualScrapeConfigs(a, b *ScrapeConfig) bool {
 	return string(sa) == string(sb)
 }
 
+func (sc *ScrapeConfig) unmarshal(data []byte) error {
+	return yaml.UnmarshalStrict(data, sc)
+}
+
 func (sc *ScrapeConfig) marshal() []byte {
 	data, err := yaml.Marshal(sc)
 	if err != nil {
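A note on why the new unmarshal helper uses yaml.UnmarshalStrict rather than plain yaml.Unmarshal: in gopkg.in/yaml.v2, the strict variant returns an error when the input contains a field that the target struct does not declare, while the plain variant silently drops it. That strictness is harmless inside clone(), since the input is always the output of yaml.Marshal on the same type. A small sketch of the difference:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type config struct {
	A int `yaml:"a"`
}

func main() {
	in := []byte("a: 1\nb: 2") // "b" is not a field of config
	var c config
	fmt.Println(yaml.Unmarshal(in, &c))       // <nil>: the unknown field is ignored
	fmt.Println(yaml.UnmarshalStrict(in, &c)) // error: field b not found in type main.config
}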
@@ -411,15 +415,29 @@ func (cfg *Config) parseData(data []byte, path string) ([]byte, error) {
 	// Initialize cfg.ScrapeConfigs
 	for i, sc := range cfg.ScrapeConfigs {
+		// Make a copy of sc in order to remove references to `data` memory.
+		// This should prevent from memory leaks on config reload.
+		sc = sc.clone()
+		cfg.ScrapeConfigs[i] = sc
 		swc, err := getScrapeWorkConfig(sc, cfg.baseDir, &cfg.Global)
 		if err != nil {
-			return nil, fmt.Errorf("cannot parse `scrape_config` #%d: %w", i+1, err)
+			return nil, fmt.Errorf("cannot parse `scrape_config`: %w", err)
 		}
 		sc.swc = swc
 	}
 	return dataNew, nil
 }
 
+func (sc *ScrapeConfig) clone() *ScrapeConfig {
+	data := sc.marshal()
+	var scCopy ScrapeConfig
+	if err := scCopy.unmarshal(data); err != nil {
+		logger.Panicf("BUG: cannot unmarshal scrape config: %s", err)
+	}
+	return &scCopy
+}
+
 func getSWSByJob(sws []*ScrapeWork) map[string][]*ScrapeWork {
 	m := make(map[string][]*ScrapeWork)
 	for _, sw := range sws {
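As for why the copy is needed at all: the new comment in parseData states that cloning removes references to the `data` memory. The general Go mechanism behind such leaks is that any retained value aliasing even a few bytes of a slice keeps the slice's entire backing array reachable. After 26b78ad707 started carrying unchanged ScrapeConfig values over across reloads, anything in them still pointing into the old config buffer would pin that buffer for the lifetime of the scrape job. The sketch below demonstrates the retention effect in isolation; it is a generic illustration, not a claim about which promscrape field did the pinning:

package main

import (
	"fmt"
	"runtime"
)

// heapMiB forces a GC and reports live heap memory in MiB.
func heapMiB() uint64 {
	runtime.GC()
	var m runtime.MemStats
	runtime.ReadMemStats(&m)
	return m.HeapAlloc >> 20
}

func main() {
	big := make([]byte, 64<<20) // e.g. a large parsed config buffer
	small := big[:16]           // a tiny retained view into it
	big = nil

	fmt.Println("retained MiB:", heapMiB()) // ~64: the subslice pins the whole array

	small = append([]byte(nil), small...)     // deep copy, like ScrapeConfig.clone()
	fmt.Println("after copy MiB:", heapMiB()) // ~0: the big array is now collectable
	_ = small
}

Replacing each parsed ScrapeConfig with its clone plays the same role as the append copy above: once the clones are in place, nothing references the parsed buffer anymore, so the GC can reclaim it on the next reload even when unchanged scrape jobs are reused.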