lib/promscrape: rename 'scrape_config->scrape_limit' to 'scrape_config->sample_limit'
The `scrape_config` block in the Prometheus config contains a `sample_limit` field, while in `vmagent` this field was mistakenly named `scrape_limit`.
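For reference, a minimal `scrape_config` using the Prometheus-compatible field name could look like the following sketch (the job name, target and limit value are made up for illustration):

scrape_configs:
  - job_name: node-exporter            # hypothetical job name
    sample_limit: 5000                 # the whole scrape is dropped if more than 5000 samples remain after relabeling
    static_configs:
      - targets: ["localhost:9100"]    # hypothetical target

With this change, vmagent reads the same `sample_limit` key as Prometheus instead of the vmagent-specific `scrape_limit`.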
commit f58d15f27c (parent 755f649c72)
3 changed files with 9 additions and 10 deletions
@@ -61,7 +61,7 @@ type ScrapeConfig struct {
     KubernetesSDConfigs  []KubernetesSDConfig        `yaml:"kubernetes_sd_configs"`
     RelabelConfigs       []promrelabel.RelabelConfig `yaml:"relabel_configs"`
     MetricRelabelConfigs []promrelabel.RelabelConfig `yaml:"metric_relabel_configs"`
-    ScrapeLimit          int                         `yaml:"scrape_limit"`
+    SampleLimit          int                         `yaml:"sample_limit"`

     // This is set in loadConfig
     swc *scrapeWorkConfig
@@ -264,7 +264,6 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
     if err != nil {
         return nil, fmt.Errorf("cannot parse `metric_relabel_configs` for `job_name` %q: %s", jobName, err)
     }
-    scrapeLimit := sc.ScrapeLimit
     swc := &scrapeWorkConfig{
         scrapeInterval: scrapeInterval,
         scrapeTimeout:  scrapeTimeout,
@@ -278,7 +277,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         externalLabels:       globalCfg.ExternalLabels,
         relabelConfigs:       relabelConfigs,
         metricRelabelConfigs: metricRelabelConfigs,
-        scrapeLimit:          scrapeLimit,
+        sampleLimit:          sc.SampleLimit,
     }
     return swc, nil
 }
@@ -296,7 +295,7 @@ type scrapeWorkConfig struct {
     externalLabels       map[string]string
     relabelConfigs       []promrelabel.ParsedRelabelConfig
     metricRelabelConfigs []promrelabel.ParsedRelabelConfig
-    scrapeLimit          int
+    sampleLimit          int
 }

 func (sdc *KubernetesSDConfig) appendScrapeWork(dst []ScrapeWork, baseDir string, swc *scrapeWorkConfig) []ScrapeWork {
@@ -481,7 +480,7 @@ func appendScrapeWork(dst []ScrapeWork, swc *scrapeWorkConfig, target string, ex
         Labels:               labels,
         AuthConfig:           swc.authConfig,
         MetricRelabelConfigs: swc.metricRelabelConfigs,
-        ScrapeLimit:          swc.scrapeLimit,
+        SampleLimit:          swc.sampleLimit,
     })
     return dst, nil
 }
@@ -54,7 +54,7 @@ type ScrapeWork struct {
     MetricRelabelConfigs []promrelabel.ParsedRelabelConfig

     // The maximum number of metrics to scrape after relabeling.
-    ScrapeLimit int
+    SampleLimit int
 }

 type scrapeWork struct {
@@ -124,7 +124,7 @@ var (
     scrapeDuration              = metrics.NewHistogram("vm_promscrape_scrape_duration_seconds")
     scrapeResponseSize          = metrics.NewHistogram("vm_promscrape_scrape_response_size_bytes")
     scrapedSamples              = metrics.NewHistogram("vm_promscrape_scraped_samples")
-    scrapesSkippedByScrapeLimit = metrics.NewCounter("vm_promscrape_scrapes_skipped_by_scrape_limit_total")
+    scrapesSkippedBySampleLimit = metrics.NewCounter("vm_promscrape_scrapes_skipped_by_sample_limit_total")
     scrapesFailed               = metrics.NewCounter("vm_promscrape_scrapes_failed_total")
     pushDataDuration            = metrics.NewHistogram("vm_promscrape_push_data_duration_seconds")
 )
@@ -151,10 +151,10 @@ func (sw *scrapeWork) scrapeInternal(timestamp int64) error {
         sw.addRowToTimeseries(&srcRows[i], timestamp)
     }
     sw.rows.Reset()
-    if sw.Config.ScrapeLimit > 0 && len(sw.writeRequest.Timeseries) > sw.Config.ScrapeLimit {
+    if sw.Config.SampleLimit > 0 && len(sw.writeRequest.Timeseries) > sw.Config.SampleLimit {
         prompbmarshal.ResetWriteRequest(&sw.writeRequest)
         up = 0
-        scrapesSkippedByScrapeLimit.Inc()
+        scrapesSkippedBySampleLimit.Inc()
     }
     samplesPostRelabeling := len(sw.writeRequest.Timeseries)
     sw.addAutoTimeseries("up", float64(up), timestamp)
@@ -243,7 +243,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
         bar{a="b",c="d"} -3e4
 `, &ScrapeWork{
         HonorLabels: true,
-        ScrapeLimit: 1,
+        SampleLimit: 1,
     }, `
         up 0 123
         scrape_samples_scraped 2 123