lib/promscrape: add follow_redirects option to scrape_configs section like Prometheus does

See https://github.com/prometheus/prometheus/pull/8546
Parent: d1dcbfd0f9
Commit: 7f9c68cdcb
5 changed files with 36 additions and 9 deletions
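The new option is used the same way as its Prometheus counterpart. A minimal, hypothetical scrape_configs snippet (the job name and target address are illustrative, not from this commit):

scrape_configs:
- job_name: redirect-sensitive
  # Don't follow HTTP redirects returned by this job's targets.
  # When the option is omitted, follow_redirects defaults to true.
  follow_redirects: false
  static_configs:
  - targets: ["app.internal:8080"]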
docs/CHANGELOG.md
@@ -3,6 +3,8 @@
 # tip
 
 * FEATURE: vminsert and vmagent: add `-sortLabels` command-line flag for sorting metric labels before pushing them to `vmstorage`. This should reduce the size of `MetricName -> internal_series_id` cache (aka `vm_cache_size_bytes{type="storage/tsid"}`) when ingesting samples for the same time series with distinct order of labels. For example, `foo{k1="v1",k2="v2"}` and `foo{k2="v2",k1="v1"}` represent a single time series.
 * FEATURE: update Go builder from `v1.16.2` to `v1.16.3`. This should fix [these issues](https://github.com/golang/go/issues?q=milestone%3AGo1.16.3+label%3ACherryPickApproved).
+* FEATURE: vmagent: add support for `follow_redirects` option to `scrape_configs` section in the same way as [Prometheus does](https://github.com/prometheus/prometheus/pull/8546).
 * FEATURE: vmagent: reduce memory usage when `-remoteWrite.queues` is set to a big value. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1167).
 * FEATURE: vmagent: add AWS IAM roles for tasks support for EC2 service discovery according to [these docs](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html).
lib/promscrape/client.go
@@ -46,6 +46,7 @@ type client struct {
     host                string
     requestURI          string
     authHeader          string
+    denyRedirects       bool
     disableCompression  bool
     disableKeepAlive    bool
 }
@@ -101,6 +102,11 @@ func newClient(sw *ScrapeWork) *client {
             },
             Timeout: sw.ScrapeTimeout,
         }
+        if sw.DenyRedirects {
+            sc.CheckRedirect = func(req *http.Request, via []*http.Request) error {
+                return http.ErrUseLastResponse
+            }
+        }
     }
     return &client{
         hc: hc,
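Here `sc` is the optional net/http-based client used for stream parsing. Returning http.ErrUseLastResponse from CheckRedirect tells the standard library to hand back the 3xx response as-is instead of following its Location header. A standalone sketch of the same mechanism (the URL is a placeholder, not part of this commit):

package main

import (
    "fmt"
    "net/http"
)

func main() {
    c := &http.Client{
        // Returning http.ErrUseLastResponse makes the client return the
        // redirect response itself rather than chasing the Location header.
        CheckRedirect: func(req *http.Request, via []*http.Request) error {
            return http.ErrUseLastResponse
        },
    }
    // Placeholder endpoint; any URL answering with a 301/302 will do.
    resp, err := c.Get("http://example.com/old")
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    fmt.Println(resp.StatusCode, resp.Header.Get("Location"))
}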
@@ -109,6 +115,7 @@ func newClient(sw *ScrapeWork) *client {
         host:                host,
         requestURI:          requestURI,
         authHeader:          sw.AuthConfig.Authorization,
+        denyRedirects:       sw.DenyRedirects,
         disableCompression:  sw.DisableCompression,
         disableKeepAlive:    sw.DisableKeepAlive,
     }
@@ -181,13 +188,17 @@ func (c *client) ReadData(dst []byte) ([]byte, error) {
     err := doRequestWithPossibleRetry(c.hc, req, resp, deadline)
     statusCode := resp.StatusCode()
     if err == nil && (statusCode == fasthttp.StatusMovedPermanently || statusCode == fasthttp.StatusFound) {
-        // Allow a single redirect.
-        // It is expected that the redirect is made on the same host.
-        // Otherwise it won't work.
-        if location := resp.Header.Peek("Location"); len(location) > 0 {
-            req.URI().UpdateBytes(location)
-            err = c.hc.DoDeadline(req, resp, deadline)
-            statusCode = resp.StatusCode()
+        if c.denyRedirects {
+            err = fmt.Errorf("cannot follow redirects if `follow_redirects: false` is set")
+        } else {
+            // Allow a single redirect.
+            // It is expected that the redirect is made on the same host.
+            // Otherwise it won't work.
+            if location := resp.Header.Peek("Location"); len(location) > 0 {
+                req.URI().UpdateBytes(location)
+                err = c.hc.DoDeadline(req, resp, deadline)
+                statusCode = resp.StatusCode()
+            }
         }
     }
     if swapResponseBodies {
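Unlike net/http, the fasthttp client never follows redirects on its own, which is why ReadData replays the request manually for a single 301/302 hop; with denyRedirects set, that hop is skipped and an error is reported instead. A rough standalone sketch of the manual single-redirect pattern (placeholder host; the real code reuses pooled request/response objects and its own retry logic):

package main

import (
    "fmt"
    "time"

    "github.com/valyala/fasthttp"
)

func main() {
    req := fasthttp.AcquireRequest()
    resp := fasthttp.AcquireResponse()
    defer fasthttp.ReleaseRequest(req)
    defer fasthttp.ReleaseResponse(resp)

    req.SetRequestURI("http://example.com/metrics") // placeholder target
    deadline := time.Now().Add(10 * time.Second)

    hc := &fasthttp.HostClient{Addr: "example.com:80"}
    if err := hc.DoDeadline(req, resp, deadline); err != nil {
        panic(err)
    }
    sc := resp.StatusCode()
    if sc == fasthttp.StatusMovedPermanently || sc == fasthttp.StatusFound {
        // fasthttp doesn't follow redirects for us: update the URI from
        // the Location header and retry once on the same host.
        if loc := resp.Header.Peek("Location"); len(loc) > 0 {
            req.URI().UpdateBytes(loc)
            if err := hc.DoDeadline(req, resp, deadline); err != nil {
                panic(err)
            }
        }
    }
    fmt.Println(resp.StatusCode())
}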
lib/promscrape/config.go
@@ -88,6 +88,7 @@ type ScrapeConfig struct {
     MetricsPath     string                    `yaml:"metrics_path,omitempty"`
     HonorLabels     bool                      `yaml:"honor_labels,omitempty"`
     HonorTimestamps bool                      `yaml:"honor_timestamps,omitempty"`
+    FollowRedirects *bool                     `yaml:"follow_redirects"` // omitempty isn't set, since the default value for this flag is true.
     Scheme          string                    `yaml:"scheme,omitempty"`
     Params          map[string][]string       `yaml:"params,omitempty"`
     BasicAuth       *promauth.BasicAuthConfig `yaml:"basic_auth,omitempty"`
@@ -531,6 +532,10 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
     }
     honorLabels := sc.HonorLabels
     honorTimestamps := sc.HonorTimestamps
+    denyRedirects := false
+    if sc.FollowRedirects != nil {
+        denyRedirects = !*sc.FollowRedirects
+    }
     metricsPath := sc.MetricsPath
     if metricsPath == "" {
         metricsPath = "/metrics"
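FollowRedirects is a *bool rather than a bool so that an omitted key can be told apart from an explicit false: only the explicit false flips denyRedirects, keeping the default aligned with Prometheus. A self-contained sketch of this tri-state pattern (illustrative code, using gopkg.in/yaml.v2 like the rest of this package):

package main

import (
    "fmt"

    "gopkg.in/yaml.v2"
)

type cfg struct {
    FollowRedirects *bool `yaml:"follow_redirects"`
}

// denyRedirects mirrors the logic above: redirects are followed by default,
// and only an explicit `follow_redirects: false` disables them.
func denyRedirects(c cfg) bool {
    return c.FollowRedirects != nil && !*c.FollowRedirects
}

func main() {
    for _, data := range []string{"{}", "follow_redirects: false", "follow_redirects: true"} {
        var c cfg
        if err := yaml.Unmarshal([]byte(data), &c); err != nil {
            panic(err)
        }
        fmt.Printf("%-26q -> denyRedirects=%v\n", data, denyRedirects(c))
    }
}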
@@ -571,6 +576,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         authConfig:           ac,
         honorLabels:          honorLabels,
         honorTimestamps:      honorTimestamps,
+        denyRedirects:        denyRedirects,
         externalLabels:       globalCfg.ExternalLabels,
         relabelConfigs:       relabelConfigs,
         metricRelabelConfigs: metricRelabelConfigs,
@@ -596,6 +602,7 @@ type scrapeWorkConfig struct {
     authConfig           *promauth.Config
     honorLabels          bool
     honorTimestamps      bool
+    denyRedirects        bool
     externalLabels       map[string]string
     relabelConfigs       *promrelabel.ParsedConfigs
     metricRelabelConfigs *promrelabel.ParsedConfigs
@@ -856,6 +863,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
         ScrapeTimeout:   swc.scrapeTimeout,
         HonorLabels:     swc.honorLabels,
         HonorTimestamps: swc.honorTimestamps,
+        DenyRedirects:   swc.denyRedirects,
         OriginalLabels:  originalLabels,
         Labels:          labels,
         ProxyURL:        swc.proxyURL,
lib/promscrape/config_test.go
@@ -751,6 +751,7 @@ scrape_configs:
   scheme: https
   honor_labels: true
   honor_timestamps: true
+  follow_redirects: false
   params:
     p: ["x&y", "="]
 xaa:
@@ -779,6 +780,7 @@ scrape_configs:
         ScrapeTimeout:   12 * time.Second,
         HonorLabels:     true,
         HonorTimestamps: true,
+        DenyRedirects:   true,
         Labels: []prompbmarshal.Label{
             {
                 Name: "__address__",
@@ -824,6 +826,7 @@ scrape_configs:
         ScrapeTimeout:   12 * time.Second,
         HonorLabels:     true,
         HonorTimestamps: true,
+        DenyRedirects:   true,
         Labels: []prompbmarshal.Label{
             {
                 Name: "__address__",
lib/promscrape/scrapework.go
@@ -48,6 +48,9 @@ type ScrapeWork struct {
     // See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config
     HonorTimestamps bool
 
+    // Whether to deny redirects during requests to scrape config.
+    DenyRedirects bool
+
     // OriginalLabels contains original labels before relabeling.
     //
     // These labels are needed for relabeling troubleshooting at /targets page.
@@ -107,10 +110,10 @@
 // it can be used for comparing for equality for two ScrapeWork objects.
 func (sw *ScrapeWork) key() string {
     // Do not take into account OriginalLabels.
-    key := fmt.Sprintf("ScrapeURL=%s, ScrapeInterval=%s, ScrapeTimeout=%s, HonorLabels=%v, HonorTimestamps=%v, Labels=%s, "+
+    key := fmt.Sprintf("ScrapeURL=%s, ScrapeInterval=%s, ScrapeTimeout=%s, HonorLabels=%v, HonorTimestamps=%v, DenyRedirects=%v, Labels=%s, "+
         "ProxyURL=%s, ProxyAuthConfig=%s, AuthConfig=%s, MetricRelabelConfigs=%s, SampleLimit=%d, DisableCompression=%v, DisableKeepAlive=%v, StreamParse=%v, "+
         "ScrapeAlignInterval=%s, ScrapeOffset=%s",
-        sw.ScrapeURL, sw.ScrapeInterval, sw.ScrapeTimeout, sw.HonorLabels, sw.HonorTimestamps, sw.LabelsString(),
+        sw.ScrapeURL, sw.ScrapeInterval, sw.ScrapeTimeout, sw.HonorLabels, sw.HonorTimestamps, sw.DenyRedirects, sw.LabelsString(),
         sw.ProxyURL.String(), sw.ProxyAuthConfig.String(),
         sw.AuthConfig.String(), sw.MetricRelabelConfigs.String(), sw.SampleLimit, sw.DisableCompression, sw.DisableKeepAlive, sw.StreamParse,
         sw.ScrapeAlignInterval, sw.ScrapeOffset)
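Extending key() is what makes the new option take effect on config reload: two scrape works that differ only in redirect policy must produce different identity strings, or the change would go unnoticed. A minimal illustration of the Sprintf-key pattern (simplified, not the actual ScrapeWork type):

package main

import "fmt"

type work struct {
    scrapeURL     string
    denyRedirects bool
}

// key mimics ScrapeWork.key(): every option that affects scraping behavior
// must be part of the identity string, or changes to it are ignored.
func (w *work) key() string {
    return fmt.Sprintf("ScrapeURL=%s, DenyRedirects=%v", w.scrapeURL, w.denyRedirects)
}

func main() {
    a := &work{"http://host/metrics", false}
    b := &work{"http://host/metrics", true}
    fmt.Println(a.key() == b.key()) // false: the changed redirect policy is detected
}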