lib/promscrape: properly set Host header when sending requests via http proxy

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2794
Aliaksandr Valialkin 2022-07-07 02:25:31 +03:00
parent 9dd5d3a431
commit 5794886662
4 changed files with 37 additions and 36 deletions
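For orientation before the diffs: when a scrape request is sent through an HTTP proxy, the TCP connection is dialed to the proxy and the request URI carries the absolute target URL, so the `Host` header must be set to the scrape target's host:port explicitly rather than being derived from the dialed address. The sketch below illustrates that intent with fasthttp (the client library used in this code); the proxy and target addresses are hypothetical, and this is not the code from the commit itself.

```go
package main

import (
	"fmt"
	"time"

	"github.com/valyala/fasthttp"
)

func main() {
	// Hypothetical addresses: the proxy we dial and the target we scrape.
	const proxyAddr = "proxy.internal:3128"
	const targetHostPort = "app.internal:9100"

	// The HostClient dials the proxy...
	hc := &fasthttp.HostClient{Addr: proxyAddr}

	req := fasthttp.AcquireRequest()
	resp := fasthttp.AcquireResponse()
	defer fasthttp.ReleaseRequest(req)
	defer fasthttp.ReleaseResponse(resp)

	// ...the request URI is the absolute target URL (web-proxy convention)...
	req.SetRequestURI("http://" + targetHostPort + "/metrics")
	// ...and the Host header names the target, not the proxy.
	req.Header.SetHost(targetHostPort)

	if err := hc.DoDeadline(req, resp, time.Now().Add(10*time.Second)); err != nil {
		fmt.Println("scrape via proxy failed:", err)
		return
	}
	fmt.Println("status:", resp.StatusCode())
}
```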


@@ -61,6 +61,7 @@ scrape_configs:
 * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): properly add service-level labels (`__meta_kubernetes_service_*`) to discovered targets for `role: endpointslice` in [kubernetes_sd_config](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#kubernetes_sd_config). Previously these labels were missing. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2823).
 * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): make sure that [stale markers](https://docs.victoriametrics.com/vmagent.html#prometheus-staleness-markers) are generated with the actual timestamp when an unsuccessful scrape occurs. This should prevent possible time series overlap on scrape target restart in dynamic environments such as Kubernetes.
 * BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): properly reload changed `-promscrape.config` file when `-promscrape.configCheckInterval` option is set. The changed config file wasn't reloaded in this case since [v1.69.0](#v1690). See [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/2786). Thanks to @ttyv for the fix.
+* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): properly set `Host` header during target scraping when `proxy_url` is set to an HTTP proxy. Previously the `Host` header was set to the proxy hostname instead of the target hostname. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2794).
 * BUGFIX: [VictoriaMetrics cluster](https://docs.victoriametrics.com/Cluster-VictoriaMetrics.html): assume that the response is complete if `-search.denyPartialResponse` is enabled and up to `-replicationFactor - 1` `vmstorage` nodes are unavailable. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1767).
 * BUGFIX: [vmselect](https://docs.victoriametrics.com/#vmselect): update `vm_partial_results_total` metric labels to be consistent with `vm_requests_total` labels.
 * FEATURE: accept tags without values when reading data in [DataDog format](https://docs.victoriametrics.com/Single-server-VictoriaMetrics.html#how-to-send-data-from-datadog-agent). Thanks to @PerGon for the [pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/2839).


@@ -46,7 +46,7 @@ type client struct {
     scrapeURL               string
     scrapeTimeoutSecondsStr string
-    host                    string
+    hostPort                string
     requestURI              string
     setHeaders              func(req *http.Request)
     setProxyHeaders         func(req *http.Request)
@@ -57,10 +57,21 @@ type client struct {
     disableKeepAlive        bool
 }
 
+func addMissingPort(addr string, isTLS bool) string {
+    if strings.Contains(addr, ":") {
+        return addr
+    }
+    if isTLS {
+        return addr + ":443"
+    }
+    return addr + ":80"
+}
+
 func newClient(sw *ScrapeWork) *client {
     var u fasthttp.URI
     u.Update(sw.ScrapeURL)
-    host := string(u.Host())
+    hostPort := string(u.Host())
+    dialAddr := hostPort
     requestURI := string(u.RequestURI())
     isTLS := string(u.Scheme()) == "https"
     var tlsCfg *tls.Config
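The `addMissingPort` helper introduced in the hunk above only appends a default port when the address has none. A minimal table-driven test sketch for it, assuming it lives in the `promscrape` package alongside the helper:

```go
package promscrape

import "testing"

func TestAddMissingPort(t *testing.T) {
	cases := []struct {
		addr   string
		isTLS  bool
		result string
	}{
		{"foo.bar", false, "foo.bar:80"},       // plain HTTP defaults to :80
		{"foo.bar", true, "foo.bar:443"},       // HTTPS defaults to :443
		{"foo.bar:1234", true, "foo.bar:1234"}, // an explicit port is kept as-is
	}
	for _, c := range cases {
		if got := addMissingPort(c.addr, c.isTLS); got != c.result {
			t.Fatalf("addMissingPort(%q, %v) = %q; want %q", c.addr, c.isTLS, got, c.result)
		}
	}
}
```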
@@ -75,7 +86,7 @@ func newClient(sw *ScrapeWork) *client {
         // like net/http package from Go does.
         // See https://en.wikipedia.org/wiki/Proxy_server#Web_proxy_servers
         pu := proxyURL.GetURL()
-        host = pu.Host
+        dialAddr = pu.Host
         requestURI = sw.ScrapeURL
         isTLS = pu.Scheme == "https"
         if isTLS {
@@ -90,19 +101,14 @@ func newClient(sw *ScrapeWork) *client {
         }
         proxyURL = &proxy.URL{}
     }
-    if !strings.Contains(host, ":") {
-        if !isTLS {
-            host += ":80"
-        } else {
-            host += ":443"
-        }
-    }
+    hostPort = addMissingPort(hostPort, isTLS)
+    dialAddr = addMissingPort(dialAddr, isTLS)
     dialFunc, err := newStatDialFunc(proxyURL, sw.ProxyAuthConfig)
     if err != nil {
         logger.Fatalf("cannot create dial func: %s", err)
     }
     hc := &fasthttp.HostClient{
-        Addr:  host,
+        Addr:  dialAddr,
         Name:  "vm_promscrape",
         Dial:  dialFunc,
         IsTLS: isTLS,
@@ -152,7 +158,7 @@ func newClient(sw *ScrapeWork) *client {
         sc:                      sc,
         scrapeURL:               sw.ScrapeURL,
         scrapeTimeoutSecondsStr: fmt.Sprintf("%.3f", sw.ScrapeTimeout.Seconds()),
-        host:                    host,
+        hostPort:                hostPort,
         requestURI:              requestURI,
         setHeaders:              func(req *http.Request) { sw.AuthConfig.SetHeaders(req, true) },
         setProxyHeaders:         setProxyHeaders,
@@ -218,7 +224,7 @@ func (c *client) ReadData(dst []byte) ([]byte, error) {
     deadline := time.Now().Add(c.hc.ReadTimeout)
     req := fasthttp.AcquireRequest()
     req.SetRequestURI(c.requestURI)
-    req.Header.SetHost(c.host)
+    req.Header.SetHost(c.hostPort)
     // The following `Accept` header has been copied from Prometheus sources.
     // See https://github.com/prometheus/prometheus/blob/f9d21f10ecd2a343a381044f131ea4e46381ce09/scrape/scrape.go#L532 .
     // This is needed as a workaround for scraping stupid Java-based servers such as Spring Boot.


@@ -1143,7 +1143,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
         droppedTargetsMap.Register(originalLabels)
         return nil, nil
     }
-    addressRelabeled = addMissingPort(schemeRelabeled, addressRelabeled)
+    addressRelabeled = addMissingPort(addressRelabeled, schemeRelabeled == "https")
     metricsPathRelabeled := promrelabel.GetLabelValueByName(labels, "__metrics_path__")
     if metricsPathRelabeled == "" {
         metricsPathRelabeled = "/metrics"
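The call-site change above reflects the helper's new signature: the old version took `(scheme, target)`, the new one takes `(addr, isTLS)`. A standalone sketch with the helper inlined and hypothetical relabeled values, just to show the resulting address:

```go
package main

import (
	"fmt"
	"strings"
)

// Inlined copy of the helper from this commit so the example runs standalone.
func addMissingPort(addr string, isTLS bool) string {
	if strings.Contains(addr, ":") {
		return addr
	}
	if isTLS {
		return addr + ":443"
	}
	return addr + ":80"
}

func main() {
	// Hypothetical values produced by relabeling.
	addressRelabeled := "node-1.example.com"
	schemeRelabeled := "https"

	// Old call shape: addMissingPort(schemeRelabeled, addressRelabeled)
	// New call shape: the address comes first and the scheme collapses to a bool.
	addressRelabeled = addMissingPort(addressRelabeled, schemeRelabeled == "https")
	fmt.Println(addressRelabeled) // node-1.example.com:443
}
```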
@@ -1360,18 +1360,6 @@ func appendLabel(dst []prompbmarshal.Label, name, value string) []prompbmarshal.
     })
 }
 
-func addMissingPort(scheme, target string) string {
-    if strings.Contains(target, ":") {
-        return target
-    }
-    if scheme == "https" {
-        target += ":443"
-    } else {
-        target += ":80"
-    }
-    return target
-}
-
 const (
     defaultScrapeInterval = time.Minute
     defaultScrapeTimeout  = 10 * time.Second


@@ -48,6 +48,16 @@ type Client struct {
     sendFullURL bool
 }
 
+func addMissingPort(addr string, isTLS bool) string {
+    if strings.Contains(addr, ":") {
+        return addr
+    }
+    if isTLS {
+        return addr + ":443"
+    }
+    return addr + ":80"
+}
+
 // NewClient returns new Client for the given args.
 func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxyAC *promauth.Config) (*Client, error) {
     var u fasthttp.URI
@@ -64,6 +74,7 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy
     }
     hostPort := string(u.Host())
+    dialAddr := hostPort
     isTLS := string(u.Scheme()) == "https"
     var tlsCfg *tls.Config
     if isTLS {
@@ -76,7 +87,7 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy
         // like net/http package from Go does.
         // See https://en.wikipedia.org/wiki/Proxy_server#Web_proxy_servers
         pu := proxyURL.GetURL()
-        hostPort = pu.Host
+        dialAddr = pu.Host
         isTLS = pu.Scheme == "https"
         if isTLS {
             tlsCfg = proxyAC.NewTLSConfig()
@@ -87,13 +98,8 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy
         }
         proxyURL = &proxy.URL{}
     }
-    if !strings.Contains(hostPort, ":") {
-        port := "80"
-        if isTLS {
-            port = "443"
-        }
-        hostPort = net.JoinHostPort(hostPort, port)
-    }
+    hostPort = addMissingPort(hostPort, isTLS)
+    dialAddr = addMissingPort(dialAddr, isTLS)
     if dialFunc == nil {
         var err error
         dialFunc, err = proxyURL.NewDialFunc(proxyAC)
@@ -102,7 +108,7 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy
         }
     }
     hc := &fasthttp.HostClient{
-        Addr:      hostPort,
+        Addr:      dialAddr,
         Name:      "vm_promscrape/discovery",
         IsTLS:     isTLS,
         TLSConfig: tlsCfg,
@@ -113,7 +119,7 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy
         Dial:      dialFunc,
     }
     blockingClient := &fasthttp.HostClient{
-        Addr:      hostPort,
+        Addr:      dialAddr,
         Name:      "vm_promscrape/discovery",
         IsTLS:     isTLS,
         TLSConfig: tlsCfg,