From f58d15f27c4a9743bcc6d42fc5b728a176361f96 Mon Sep 17 00:00:00 2001
From: Aliaksandr Valialkin <valyala@gmail.com>
Date: Tue, 14 Apr 2020 11:58:15 +0300
Subject: [PATCH] lib/promscrape: rename 'scrape_config->scrape_limit' to
 'scrape_config->sample_limit'

`scrape_config` block from Prometheus config contains `sample_limit` field,
while in `vmagent` this field was mistakenly named `scrape_limit`.
---
 lib/promscrape/config.go          | 9 ++++-----
 lib/promscrape/scrapework.go      | 8 ++++----
 lib/promscrape/scrapework_test.go | 2 +-
 3 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/lib/promscrape/config.go b/lib/promscrape/config.go
index f26bbb48d..a9950af73 100644
--- a/lib/promscrape/config.go
+++ b/lib/promscrape/config.go
@@ -61,7 +61,7 @@ type ScrapeConfig struct {
 	KubernetesSDConfigs  []KubernetesSDConfig        `yaml:"kubernetes_sd_configs"`
 	RelabelConfigs       []promrelabel.RelabelConfig `yaml:"relabel_configs"`
 	MetricRelabelConfigs []promrelabel.RelabelConfig `yaml:"metric_relabel_configs"`
-	ScrapeLimit          int                         `yaml:"scrape_limit"`
+	SampleLimit          int                         `yaml:"sample_limit"`
 
 	// This is set in loadConfig
 	swc *scrapeWorkConfig
@@ -264,7 +264,6 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
 	if err != nil {
 		return nil, fmt.Errorf("cannot parse `metric_relabel_configs` for `job_name` %q: %s", jobName, err)
 	}
-	scrapeLimit := sc.ScrapeLimit
 	swc := &scrapeWorkConfig{
 		scrapeInterval:       scrapeInterval,
 		scrapeTimeout:        scrapeTimeout,
@@ -278,7 +277,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
 		externalLabels:       globalCfg.ExternalLabels,
 		relabelConfigs:       relabelConfigs,
 		metricRelabelConfigs: metricRelabelConfigs,
-		scrapeLimit:          scrapeLimit,
+		sampleLimit:          sc.SampleLimit,
 	}
 	return swc, nil
 }
@@ -296,7 +295,7 @@ type scrapeWorkConfig struct {
 	externalLabels       map[string]string
 	relabelConfigs       []promrelabel.ParsedRelabelConfig
 	metricRelabelConfigs []promrelabel.ParsedRelabelConfig
-	scrapeLimit          int
+	sampleLimit          int
 }
 
 func (sdc *KubernetesSDConfig) appendScrapeWork(dst []ScrapeWork, baseDir string, swc *scrapeWorkConfig) []ScrapeWork {
@@ -481,7 +480,7 @@ func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, ex
 		Labels:               labels,
 		AuthConfig:           swc.authConfig,
 		MetricRelabelConfigs: swc.metricRelabelConfigs,
-		ScrapeLimit:          swc.scrapeLimit,
+		SampleLimit:          swc.sampleLimit,
 	})
 	return dst, nil
 }
diff --git a/lib/promscrape/scrapework.go b/lib/promscrape/scrapework.go
index db944f010..837127033 100644
--- a/lib/promscrape/scrapework.go
+++ b/lib/promscrape/scrapework.go
@@ -54,7 +54,7 @@ type ScrapeWork struct {
 	MetricRelabelConfigs []promrelabel.ParsedRelabelConfig
 
 	// The maximum number of metrics to scrape after relabeling.
-	ScrapeLimit int
+	SampleLimit int
 }
 
 type scrapeWork struct {
@@ -124,7 +124,7 @@ var (
 	scrapeDuration              = metrics.NewHistogram("vm_promscrape_scrape_duration_seconds")
 	scrapeResponseSize          = metrics.NewHistogram("vm_promscrape_scrape_response_size_bytes")
 	scrapedSamples              = metrics.NewHistogram("vm_promscrape_scraped_samples")
-	scrapesSkippedByScrapeLimit = metrics.NewCounter("vm_promscrape_scrapes_skipped_by_scrape_limit_total")
+	scrapesSkippedBySampleLimit = metrics.NewCounter("vm_promscrape_scrapes_skipped_by_sample_limit_total")
 	scrapesFailed               = metrics.NewCounter("vm_promscrape_scrapes_failed_total")
 	pushDataDuration            = metrics.NewHistogram("vm_promscrape_push_data_duration_seconds")
 )
@@ -151,10 +151,10 @@ func (sw *scrapeWork) scrapeInternal(timestamp int64) error {
 		sw.addRowToTimeseries(&srcRows[i], timestamp)
 	}
 	sw.rows.Reset()
-	if sw.Config.ScrapeLimit > 0 && len(sw.writeRequest.Timeseries) > sw.Config.ScrapeLimit {
+	if sw.Config.SampleLimit > 0 && len(sw.writeRequest.Timeseries) > sw.Config.SampleLimit {
 		prompbmarshal.ResetWriteRequest(&sw.writeRequest)
 		up = 0
-		scrapesSkippedByScrapeLimit.Inc()
+		scrapesSkippedBySampleLimit.Inc()
 	}
 	samplesPostRelabeling := len(sw.writeRequest.Timeseries)
 	sw.addAutoTimeseries("up", float64(up), timestamp)
diff --git a/lib/promscrape/scrapework_test.go b/lib/promscrape/scrapework_test.go
index 3788ea2e7..7f20cae4c 100644
--- a/lib/promscrape/scrapework_test.go
+++ b/lib/promscrape/scrapework_test.go
@@ -243,7 +243,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
 		bar{a="b",c="d"} -3e4
 	`, &ScrapeWork{
 		HonorLabels: true,
-		ScrapeLimit: 1,
+		SampleLimit: 1,
 	}, `
 		up 0 123
 		scrape_samples_scraped 2 123