package promscrape

import (
	"context"
	"flag"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/VictoriaMetrics/metrics"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/bytesutil"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/flagutil"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/httputils"
)
var (
	maxResponseHeadersSize = flagutil.NewBytes("promscrape.maxResponseHeadersSize", 4096, "The maximum size of http response headers from Prometheus scrape targets")
	disableCompression     = flag.Bool("promscrape.disableCompression", false, "Whether to disable sending 'Accept-Encoding: gzip' request headers to all the scrape targets. "+
		"This may reduce CPU usage on scrape targets at the cost of higher network bandwidth utilization. "+
		"It is possible to set 'disable_compression: true' individually per each 'scrape_config' section in '-promscrape.config' for fine-grained control")
	disableKeepAlive = flag.Bool("promscrape.disableKeepAlive", false, "Whether to disable HTTP keep-alive connections when scraping all the targets. "+
		"This may be useful when targets have no support for HTTP keep-alive connections. "+
		"It is possible to set 'disable_keepalive: true' individually per each 'scrape_config' section in '-promscrape.config' for fine-grained control. "+
		"Note that disabling HTTP keep-alive may increase load on both vmagent and scrape targets")
	streamParse = flag.Bool("promscrape.streamParse", false, "Whether to enable stream parsing for metrics obtained from scrape targets. This may be useful "+
		"for reducing memory usage when millions of metrics are exposed per each scrape target. "+
		"It is possible to set 'stream_parse: true' individually per each 'scrape_config' section in '-promscrape.config' for fine-grained control")
	scrapeExemplars = flag.Bool("promscrape.scrapeExemplars", false, "Whether to enable scraping of exemplars from scrape targets.")
)
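// client is an HTTP client for scraping a single target described by ScrapeWork.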
type client struct {
	c *http.Client

	ctx context.Context

	scrapeURL               string
	scrapeTimeoutSecondsStr string

	setHeaders      func(req *http.Request) error
	setProxyHeaders func(req *http.Request) error

	maxScrapeSize int64
}
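// newClient returns a client for scraping the target described by sw.
//
// A minimal usage sketch (assuming a populated ScrapeWork sw):
//
//	c, err := newClient(ctx, sw)
//	if err != nil { ... }
//	var bb bytesutil.ByteBuffer
//	err = c.ReadData(&bb)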
func newClient(ctx context.Context, sw *ScrapeWork) (*client, error) {
	ac := sw.AuthConfig
	setHeaders := func(req *http.Request) error {
		return sw.AuthConfig.SetHeaders(req, true)
	}
	setProxyHeaders := func(_ *http.Request) error {
		return nil
	}
	proxyURL := sw.ProxyURL
	if !strings.HasPrefix(sw.ScrapeURL, "https://") && proxyURL.IsHTTPOrHTTPS() {
		// Plain-http targets behind an http/https proxy are reached without a CONNECT tunnel,
		// so proxy auth headers must be attached to the scrape request itself.
		pu := proxyURL.GetURL()
		if pu.Scheme == "https" {
			ac = sw.ProxyAuthConfig
		}
		setProxyHeaders = func(req *http.Request) error {
			return proxyURL.SetHeaders(sw.ProxyAuthConfig, req)
		}
	}
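	// Pass the proxy URL to the transport only when a proxy is configured;
	// otherwise proxyURLFunc stays nil and no proxy is used.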
	var proxyURLFunc func(*http.Request) (*url.URL, error)
	if pu := sw.ProxyURL.GetURL(); pu != nil {
		proxyURLFunc = http.ProxyURL(pu)
	}
	hc := &http.Client{
		Transport: ac.NewRoundTripper(&http.Transport{
			Proxy:                  proxyURLFunc,
			TLSHandshakeTimeout:    10 * time.Second,
			IdleConnTimeout:        2 * sw.ScrapeInterval,
			DisableCompression:     *disableCompression || sw.DisableCompression,
			DisableKeepAlives:      *disableKeepAlive || sw.DisableKeepAlive,
			DialContext:            httputils.GetStatDialFunc("vm_promscrape"),
			MaxIdleConnsPerHost:    100,
			MaxResponseHeaderBytes: int64(maxResponseHeadersSize.N),
		}),
		Timeout: sw.ScrapeTimeout,
	}
	if sw.DenyRedirects {
		hc.CheckRedirect = func(_ *http.Request, _ []*http.Request) error {
			return http.ErrUseLastResponse
		}
	}

	c := &client{
		c:                       hc,
		ctx:                     ctx,
		scrapeURL:               sw.ScrapeURL,
		scrapeTimeoutSecondsStr: fmt.Sprintf("%.3f", sw.ScrapeTimeout.Seconds()),
		setHeaders:              setHeaders,
		setProxyHeaders:         setProxyHeaders,
		maxScrapeSize:           sw.MaxScrapeSize,
	}
	return c, nil
}
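// ReadData performs a single scrape of c.scrapeURL and appends the response body to dst.
//
// At most c.maxScrapeSize bytes are read; bigger responses result in an error.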
func (c *client) ReadData(dst *bytesutil.ByteBuffer) error {
	deadline := time.Now().Add(c.c.Timeout)
	ctx, cancel := context.WithDeadline(c.ctx, deadline)
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, c.scrapeURL, nil)
	if err != nil {
		cancel()
		return fmt.Errorf("cannot create request for %q: %w", c.scrapeURL, err)
	}
	// The following `Accept` header has been copied from Prometheus sources.
	// See https://github.com/prometheus/prometheus/blob/f9d21f10ecd2a343a381044f131ea4e46381ce09/scrape/scrape.go#L532 .
	// This is needed as a workaround for scraping some Java-based servers such as Spring Boot.
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/608 for details.
	// Do not bloat the `Accept` header with OpenMetrics by default, since this standard has gained little adoption.
	req.Header.Set("Accept", "text/plain;version=0.0.4;q=1,*/*;q=0.1")
	// Request the OpenMetrics exposition format when exemplar scraping is enabled,
	// since exemplars are defined by the OpenMetrics specification.
	// See https://github.com/prometheus/docs/blob/main/content/docs/instrumenting/exposition_formats.md#openmetrics-text-format
	if *scrapeExemplars {
		req.Header.Set("Accept", "application/openmetrics-text")
	}
	// Set X-Prometheus-Scrape-Timeout-Seconds like Prometheus does, since it is used by some exporters such as PushProx.
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1179#issuecomment-813117162
	req.Header.Set("X-Prometheus-Scrape-Timeout-Seconds", c.scrapeTimeoutSecondsStr)
	req.Header.Set("User-Agent", "vm_promscrape")
	if err := c.setHeaders(req); err != nil {
		cancel()
		return fmt.Errorf("failed to set request headers for %q: %w", c.scrapeURL, err)
	}
	if err := c.setProxyHeaders(req); err != nil {
		cancel()
		return fmt.Errorf("failed to set proxy request headers for %q: %w", c.scrapeURL, err)
	}
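	// Send the scrape request; count timeouts separately from other errors.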
	scrapeRequests.Inc()
	resp, err := c.c.Do(req)
	if err != nil {
		cancel()
		if ue, ok := err.(*url.Error); ok && ue.Timeout() {
			scrapesTimedout.Inc()
		}
		return fmt.Errorf("cannot perform request to %q: %w", c.scrapeURL, err)
	}
	if resp.StatusCode != http.StatusOK {
		metrics.GetOrCreateCounter(fmt.Sprintf(`vm_promscrape_scrapes_total{status_code="%d"}`, resp.StatusCode)).Inc()
		respBody, _ := io.ReadAll(resp.Body)
		_ = resp.Body.Close()
		cancel()
		return fmt.Errorf("unexpected status code returned when scraping %q: %d; expecting %d; response body: %q",
			c.scrapeURL, resp.StatusCode, http.StatusOK, respBody)
	}
	scrapesOK.Inc()

	// Read the data from resp.Body, limiting it to c.maxScrapeSize bytes.
	r := &io.LimitedReader{
		R: resp.Body,
		N: c.maxScrapeSize,
	}
	_, err = dst.ReadFrom(r)
	_ = resp.Body.Close()
	cancel()
	if err != nil {
		if ue, ok := err.(*url.Error); ok && ue.Timeout() {
			scrapesTimedout.Inc()
		}
		return fmt.Errorf("cannot read data from %s: %w", c.scrapeURL, err)
	}
	if int64(len(dst.B)) >= c.maxScrapeSize {
		maxScrapeSizeExceeded.Inc()
		return fmt.Errorf("the response from %q exceeds -promscrape.maxScrapeSize=%d or max_scrape_size in the scrape config. "+
			"Possible solutions are: reduce the response size for the target, increase the -promscrape.maxScrapeSize command-line flag value or "+
			"increase the max_scrape_size value in the scrape config", c.scrapeURL, maxScrapeSize.N)
	}
	return nil
}
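// Counters describing the outcome of scrape requests.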
var (
	maxScrapeSizeExceeded = metrics.NewCounter(`vm_promscrape_max_scrape_size_exceeded_errors_total`)
	scrapesTimedout       = metrics.NewCounter(`vm_promscrape_scrapes_timed_out_total`)
	scrapesOK             = metrics.NewCounter(`vm_promscrape_scrapes_total{status_code="200"}`)
	scrapeRequests        = metrics.NewCounter(`vm_promscrape_scrape_requests_total`)
)