VictoriaMetrics/lib/proxy/proxy.go
Aliaksandr Valialkin bc7cf4950b
lib/promscrape: use the standard net/http.Client instead of fasthttp.Client for scraping targets in non-streaming mode
While fasthttp.Client uses less CPU and RAM when scraping targets with small responses (up to 10K metrics),
it doesn't work well when scraping targets with big responses such as kube-state-metrics.
In this case it can use much more additional memory compared to net/http.Client,
since fasthttp.Client reads the full response into memory and then tries to re-use the large buffer
for further scrapes.

Additionally, fasthttp.Client-based scraping had various issues with proxying, redirects
and scrape timeouts, such as the following:

- https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1945
- https://github.com/VictoriaMetrics/VictoriaMetrics/issues/5425
- https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2794
- https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1017

This should help reduce memory usage in the case when a target returns a big response
and that response used to be scraped by fasthttp.Client first, before switching to stream parsing mode
for subsequent scrapes. Now the switch to stream parsing mode is performed on the first scrape,
after reading the response body into memory and noticing that its size exceeds the value passed
to the -promscrape.minResponseSizeForStreamParse command-line flag.
Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/5567

Overrides https://github.com/VictoriaMetrics/VictoriaMetrics/pull/4931
2024-01-30 18:39:10 +02:00
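A rough, self-contained sketch of the size check described above. The names below (minResponseSizeForStreamParse, shouldSwitchToStreamParse) are illustrative placeholders, not the actual lib/promscrape identifiers:

package main

import "fmt"

// minResponseSizeForStreamParse stands in for the value of the
// -promscrape.minResponseSizeForStreamParse command-line flag, in bytes.
const minResponseSizeForStreamParse = 1_000_000

// shouldSwitchToStreamParse reports whether subsequent scrapes should use stream parsing mode,
// given a response body that was read into memory during the current scrape.
func shouldSwitchToStreamParse(body []byte) bool {
	return len(body) >= minResponseSizeForStreamParse
}

func main() {
	small := make([]byte, 10*1024)    // e.g. a target exposing a few thousand metrics
	big := make([]byte, 16*1024*1024) // e.g. kube-state-metrics on a large cluster
	fmt.Println(shouldSwitchToStreamParse(small)) // false: keep parsing in memory
	fmt.Println(shouldSwitchToStreamParse(big))   // true: stream-parse the next scrape
}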


package proxy

import (
	"encoding/base64"
	"fmt"
	"net/http"
	"net/url"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
)
var validURLSchemes = []string{"http", "https", "socks5", "tls+socks5"}

// isURLSchemeValid returns true if scheme is present in validURLSchemes.
func isURLSchemeValid(scheme string) bool {
	for _, vs := range validURLSchemes {
		if scheme == vs {
			return true
		}
	}
	return false
}
// URL implements yaml.Marshaler and yaml.Unmarshaler interfaces for url.URL.
type URL struct {
	URL *url.URL
}

// MustNewURL returns a new URL for the given u. It panics if u cannot be parsed.
func MustNewURL(u string) *URL {
	pu, err := url.Parse(u)
	if err != nil {
		logger.Panicf("BUG: cannot parse u=%q: %s", u, err)
	}
	return &URL{
		URL: pu,
	}
}

// GetURL returns the underlying url.URL. It is safe to call on a nil URL.
func (u *URL) GetURL() *url.URL {
	if u == nil || u.URL == nil {
		return nil
	}
	return u.URL
}
// IsHTTPOrHTTPS returns true if u is http or https.
func (u *URL) IsHTTPOrHTTPS() bool {
	pu := u.GetURL()
	if pu == nil {
		return false
	}
	scheme := pu.Scheme
	return scheme == "http" || scheme == "https"
}

// String returns string representation of u.
func (u *URL) String() string {
	pu := u.GetURL()
	if pu == nil {
		return ""
	}
	return pu.String()
}
// SetHeaders sets headers to req according to u and ac configs.
func (u *URL) SetHeaders(ac *promauth.Config, req *http.Request) error {
	ah, err := u.getAuthHeader(ac)
	if err != nil {
		return fmt.Errorf("cannot obtain Proxy-Authorization headers: %w", err)
	}
	if ah != "" {
		req.Header.Set("Proxy-Authorization", ah)
	}
	return ac.SetHeaders(req, false)
}
// getAuthHeader returns Proxy-Authorization auth header for the given u and ac.
func (u *URL) getAuthHeader(ac *promauth.Config) (string, error) {
	authHeader := ""
	if ac != nil {
		var err error
		authHeader, err = ac.GetAuthHeader()
		if err != nil {
			return "", err
		}
	}
	if u == nil || u.URL == nil {
		return authHeader, nil
	}
	pu := u.URL
	if pu.User != nil && len(pu.User.Username()) > 0 {
		// Basic auth credentials embedded in the proxy URL take precedence over the ac-based header.
		// For example, proxy_url=http://user:pass@host:3128 results in "Proxy-Authorization: Basic dXNlcjpwYXNz".
		userPasswordEncoded := base64.StdEncoding.EncodeToString([]byte(pu.User.String()))
		authHeader = "Basic " + userPasswordEncoded
	}
	return authHeader, nil
}
// MarshalYAML implements yaml.Marshaler interface.
func (u *URL) MarshalYAML() (interface{}, error) {
	if u.URL == nil {
		return nil, nil
	}
	return u.URL.String(), nil
}

// UnmarshalYAML implements yaml.Unmarshaler interface.
func (u *URL) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var s string
	if err := unmarshal(&s); err != nil {
		return err
	}
	parsedURL, err := url.Parse(s)
	if err != nil {
		return fmt.Errorf("cannot parse proxy_url=%q as *url.URL: %w", s, err)
	}
	if !isURLSchemeValid(parsedURL.Scheme) {
		return fmt.Errorf("cannot parse proxy_url=%q unsupported scheme format=%q, valid schemes: %s", s, parsedURL.Scheme, validURLSchemes)
	}
	u.URL = parsedURL
	return nil
}
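
A minimal usage sketch for the URL type above, assuming the package is imported from github.com/VictoriaMetrics/VictoriaMetrics/lib/proxy and the config is decoded with gopkg.in/yaml.v2, whose legacy unmarshaling interface matches the UnmarshalYAML signature above. The scrapeConfig struct and the YAML snippet are illustrative, not VictoriaMetrics types:

package main

import (
	"fmt"
	"log"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/proxy"
	"gopkg.in/yaml.v2"
)

// scrapeConfig is an illustrative config struct with a proxy_url field,
// similar in spirit to the scrape configs that use proxy.URL.
type scrapeConfig struct {
	ProxyURL *proxy.URL `yaml:"proxy_url,omitempty"`
}

func main() {
	data := []byte("proxy_url: http://user:pass@proxy.example.com:3128\n")
	var cfg scrapeConfig
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		// Unsupported schemes (anything outside validURLSchemes) fail here.
		log.Fatalf("cannot parse config: %s", err)
	}
	fmt.Println(cfg.ProxyURL.String())        // http://user:pass@proxy.example.com:3128
	fmt.Println(cfg.ProxyURL.IsHTTPOrHTTPS()) // true
	fmt.Println(cfg.ProxyURL.GetURL().Host)   // proxy.example.com:3128
}

SetHeaders can then be used to attach the Proxy-Authorization header (derived from the user info embedded in the proxy URL, or from a promauth.Config) to an outgoing *http.Request.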