lib/promscrape: add follow_redirects option to scrape_configs section like Prometheus does

See https://github.com/prometheus/prometheus/pull/8546
Aliaksandr Valialkin 2021-04-02 19:56:38 +03:00
parent d1dcbfd0f9
commit 7f9c68cdcb
5 changed files with 36 additions and 9 deletions

docs/CHANGELOG.md

@@ -3,6 +3,8 @@
 # tip
 * FEATURE: vminsert and vmagent: add `-sortLabels` command-line flag for sorting metric labels before pushing them to `vmstorage`. This should reduce the size of `MetricName -> internal_series_id` cache (aka `vm_cache_size_bytes{type="storage/tsid"}`) when ingesting samples for the same time series with distinct order of labels. For example, `foo{k1="v1",k2="v2"}` and `foo{k2="v2",k1="v1"}` represent a single time series.
+* FEATURE: update Go builder from `v1.16.2` to `v1.16.3`. This should fix [these issues](https://github.com/golang/go/issues?q=milestone%3AGo1.16.3+label%3ACherryPickApproved).
+* FEATURE: vmagent: add support for `follow_redirects` option to `scrape_configs` section in the same way as [Prometheus does](https://github.com/prometheus/prometheus/pull/8546).
 * FEATURE: vmagent: reduce memory usage when `-remoteWrite.queues` is set to a big value. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1167).
 * FEATURE: vmagent: add AWS IAM roles for tasks support for EC2 service discovery according to [these docs](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html).
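For reference, the new option is set per scrape job; a minimal `scrape_configs` sketch (the job name and target below are illustrative; `follow_redirects` defaults to `true`, matching Prometheus):

scrape_configs:
- job_name: no-redirects
  follow_redirects: false  # fail the scrape instead of following 3xx responses
  static_configs:
  - targets: ["host123:8080"]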

lib/promscrape/client.go

@@ -46,6 +46,7 @@ type client struct {
 	host               string
 	requestURI         string
 	authHeader         string
+	denyRedirects      bool
 	disableCompression bool
 	disableKeepAlive   bool
 }
@@ -101,6 +102,11 @@ func newClient(sw *ScrapeWork) *client {
 			},
 			Timeout: sw.ScrapeTimeout,
 		}
+		if sw.DenyRedirects {
+			sc.CheckRedirect = func(req *http.Request, via []*http.Request) error {
+				return http.ErrUseLastResponse
+			}
+		}
 	}
 	return &client{
 		hc: hc,
@@ -109,6 +115,7 @@ func newClient(sw *ScrapeWork) *client {
 		host:               host,
 		requestURI:         requestURI,
 		authHeader:         sw.AuthConfig.Authorization,
+		denyRedirects:      sw.DenyRedirects,
 		disableCompression: sw.DisableCompression,
 		disableKeepAlive:   sw.DisableKeepAlive,
 	}
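The `CheckRedirect` override above uses a standard `net/http` mechanism: returning `http.ErrUseLastResponse` makes the client hand the most recent response back to the caller instead of following the redirect. A standalone sketch (the URL is a placeholder):

package main

import (
	"fmt"
	"net/http"
)

func main() {
	c := &http.Client{
		// Deny redirects: return the 3xx response as-is instead of following it.
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}
	resp, err := c.Get("http://example.com/some-redirecting-endpoint")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer resp.Body.Close()
	// For a redirecting endpoint this prints the 3xx status, not the final one.
	fmt.Println(resp.StatusCode)
}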
@@ -181,13 +188,17 @@ func (c *client) ReadData(dst []byte) ([]byte, error) {
 	err := doRequestWithPossibleRetry(c.hc, req, resp, deadline)
 	statusCode := resp.StatusCode()
 	if err == nil && (statusCode == fasthttp.StatusMovedPermanently || statusCode == fasthttp.StatusFound) {
-		// Allow a single redirect.
-		// It is expected that the redirect is made on the same host.
-		// Otherwise it won't work.
-		if location := resp.Header.Peek("Location"); len(location) > 0 {
-			req.URI().UpdateBytes(location)
-			err = c.hc.DoDeadline(req, resp, deadline)
-			statusCode = resp.StatusCode()
+		if c.denyRedirects {
+			err = fmt.Errorf("cannot follow redirects if `follow_redirects: false` is set")
+		} else {
+			// Allow a single redirect.
+			// It is expected that the redirect is made on the same host.
+			// Otherwise it won't work.
+			if location := resp.Header.Peek("Location"); len(location) > 0 {
+				req.URI().UpdateBytes(location)
+				err = c.hc.DoDeadline(req, resp, deadline)
+				statusCode = resp.StatusCode()
+			}
 		}
 	}
 	if swapResponseBodies {
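Unlike `net/http`, the `fasthttp` client used on this code path never follows redirects by itself, hence the manual `Location` handling above. A minimal sketch of the same logic outside the scrape client (the URL is a placeholder; only 301/302 are handled, as in the code above):

package main

import (
	"fmt"

	"github.com/valyala/fasthttp"
)

func fetch(url string, denyRedirects bool) error {
	req := fasthttp.AcquireRequest()
	resp := fasthttp.AcquireResponse()
	defer fasthttp.ReleaseRequest(req)
	defer fasthttp.ReleaseResponse(resp)

	req.SetRequestURI(url)
	if err := fasthttp.Do(req, resp); err != nil {
		return err
	}
	sc := resp.StatusCode()
	if sc == fasthttp.StatusMovedPermanently || sc == fasthttp.StatusFound {
		if denyRedirects {
			return fmt.Errorf("cannot follow redirects if `follow_redirects: false` is set")
		}
		// Follow a single redirect by re-issuing the request against Location.
		if location := resp.Header.Peek("Location"); len(location) > 0 {
			req.URI().UpdateBytes(location)
			return fasthttp.Do(req, resp)
		}
	}
	return nil
}

func main() {
	if err := fetch("http://example.com/metrics", true); err != nil {
		fmt.Println(err)
	}
}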

lib/promscrape/config.go

@@ -88,6 +88,7 @@ type ScrapeConfig struct {
 	MetricsPath     string                    `yaml:"metrics_path,omitempty"`
 	HonorLabels     bool                      `yaml:"honor_labels,omitempty"`
 	HonorTimestamps bool                      `yaml:"honor_timestamps,omitempty"`
+	FollowRedirects *bool                     `yaml:"follow_redirects"` // omitempty isn't set, since the default value for this flag is true.
 	Scheme          string                    `yaml:"scheme,omitempty"`
 	Params          map[string][]string       `yaml:"params,omitempty"`
 	BasicAuth       *promauth.BasicAuthConfig `yaml:"basic_auth,omitempty"`
@@ -531,6 +532,10 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
 	}
 	honorLabels := sc.HonorLabels
 	honorTimestamps := sc.HonorTimestamps
+	denyRedirects := false
+	if sc.FollowRedirects != nil {
+		denyRedirects = !*sc.FollowRedirects
+	}
 	metricsPath := sc.MetricsPath
 	if metricsPath == "" {
 		metricsPath = "/metrics"
@@ -571,6 +576,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
 		authConfig:           ac,
 		honorLabels:          honorLabels,
 		honorTimestamps:      honorTimestamps,
+		denyRedirects:        denyRedirects,
 		externalLabels:       globalCfg.ExternalLabels,
 		relabelConfigs:       relabelConfigs,
 		metricRelabelConfigs: metricRelabelConfigs,
@@ -596,6 +602,7 @@ type scrapeWorkConfig struct {
 	authConfig           *promauth.Config
 	honorLabels          bool
 	honorTimestamps      bool
+	denyRedirects        bool
 	externalLabels       map[string]string
 	relabelConfigs       *promrelabel.ParsedConfigs
 	metricRelabelConfigs *promrelabel.ParsedConfigs
@@ -856,6 +863,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 		ScrapeTimeout:   swc.scrapeTimeout,
 		HonorLabels:     swc.honorLabels,
 		HonorTimestamps: swc.honorTimestamps,
+		DenyRedirects:   swc.denyRedirects,
 		OriginalLabels:  originalLabels,
 		Labels:          labels,
 		ProxyURL:        swc.proxyURL,
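The `FollowRedirects *bool` field is the usual tri-state trick for a YAML flag whose default is `true`: a nil pointer means the key was absent, which is distinguishable from an explicit `false`. A self-contained sketch of the same derivation (the struct and inputs are made up for illustration; lib/promscrape parses configs with `gopkg.in/yaml.v2`):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type cfg struct {
	FollowRedirects *bool `yaml:"follow_redirects"`
}

func main() {
	for _, data := range []string{"{}", "follow_redirects: false"} {
		var c cfg
		if err := yaml.Unmarshal([]byte(data), &c); err != nil {
			panic(err)
		}
		denyRedirects := false // default: follow redirects
		if c.FollowRedirects != nil {
			denyRedirects = !*c.FollowRedirects
		}
		fmt.Printf("%q -> denyRedirects=%v\n", data, denyRedirects)
	}
}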

lib/promscrape/config_test.go

@@ -751,6 +751,7 @@ scrape_configs:
   scheme: https
   honor_labels: true
   honor_timestamps: true
+  follow_redirects: false
   params:
     p: ["x&y", "="]
     xaa:
@@ -779,6 +780,7 @@
 			ScrapeTimeout:   12 * time.Second,
 			HonorLabels:     true,
 			HonorTimestamps: true,
+			DenyRedirects:   true,
 			Labels: []prompbmarshal.Label{
 				{
 					Name: "__address__",
@@ -824,6 +826,7 @@ scrape_configs:
 			ScrapeTimeout:   12 * time.Second,
 			HonorLabels:     true,
 			HonorTimestamps: true,
+			DenyRedirects:   true,
 			Labels: []prompbmarshal.Label{
 				{
 					Name: "__address__",

lib/promscrape/scrapework.go

@@ -48,6 +48,9 @@ type ScrapeWork struct {
 	// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config
 	HonorTimestamps bool
 
+	// Whether to deny redirects during requests to scrape config.
+	DenyRedirects bool
+
 	// OriginalLabels contains original labels before relabeling.
 	//
 	// These labels are needed for relabeling troubleshooting at /targets page.
@@ -107,10 +110,10 @@
 // it can be used for comparing for equality for two ScrapeWork objects.
 func (sw *ScrapeWork) key() string {
 	// Do not take into account OriginalLabels.
-	key := fmt.Sprintf("ScrapeURL=%s, ScrapeInterval=%s, ScrapeTimeout=%s, HonorLabels=%v, HonorTimestamps=%v, Labels=%s, "+
+	key := fmt.Sprintf("ScrapeURL=%s, ScrapeInterval=%s, ScrapeTimeout=%s, HonorLabels=%v, HonorTimestamps=%v, DenyRedirects=%v, Labels=%s, "+
 		"ProxyURL=%s, ProxyAuthConfig=%s, AuthConfig=%s, MetricRelabelConfigs=%s, SampleLimit=%d, DisableCompression=%v, DisableKeepAlive=%v, StreamParse=%v, "+
 		"ScrapeAlignInterval=%s, ScrapeOffset=%s",
-		sw.ScrapeURL, sw.ScrapeInterval, sw.ScrapeTimeout, sw.HonorLabels, sw.HonorTimestamps, sw.LabelsString(),
+		sw.ScrapeURL, sw.ScrapeInterval, sw.ScrapeTimeout, sw.HonorLabels, sw.HonorTimestamps, sw.DenyRedirects, sw.LabelsString(),
 		sw.ProxyURL.String(), sw.ProxyAuthConfig.String(),
 		sw.AuthConfig.String(), sw.MetricRelabelConfigs.String(), sw.SampleLimit, sw.DisableCompression, sw.DisableKeepAlive, sw.StreamParse,
 		sw.ScrapeAlignInterval, sw.ScrapeOffset)
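A note on the `key()` change: per the comment above, the key is what makes two ScrapeWork objects comparable for equality across config reloads, so a behavioral field left out of it would make flipping `follow_redirects` a silent no-op for already-running targets. A toy illustration (types and the reload check are simplified stand-ins, not the repo's actual code):

package main

import "fmt"

// work is a stripped-down stand-in for ScrapeWork.
type work struct {
	url           string
	denyRedirects bool
}

func (w work) key() string {
	return fmt.Sprintf("ScrapeURL=%s, DenyRedirects=%v", w.url, w.denyRedirects)
}

func main() {
	oldW := work{url: "http://host123/metrics", denyRedirects: false}
	newW := work{url: "http://host123/metrics", denyRedirects: true}
	if oldW.key() != newW.key() {
		// Without denyRedirects in the key, this change would go unnoticed.
		fmt.Println("config changed: restart the scraper")
	}
}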