Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git (synced 2025-02-09 15:27:11 +00:00)

lib/promscrape: add ability to configure proxy options via proxy_tls_config, proxy_basic_auth, proxy_bearer_token and proxy_bearer_token_file

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1116

parent fa448806a5
commit a6a71ef861

8 changed files with 148 additions and 11 deletions
@@ -255,6 +255,41 @@ If each target is scraped by multiple `vmagent` instances, then data deduplicati
 See [these docs](https://victoriametrics.github.io/#deduplication) for details.


+## Scraping targets via a proxy
+
+`vmagent` supports scraping targets via http and https proxies. The proxy address must be specified in the `proxy_url` option. For example, the following scrape config instructs
+target scraping via the https proxy at `https://proxy-addr:1234`:
+
+```yml
+scrape_configs:
+- job_name: foo
+  proxy_url: https://proxy-addr:1234
+```
+
+The proxy can be configured with the following optional settings:
+
+* `proxy_bearer_token` and `proxy_bearer_token_file` for Bearer token authorization
+* `proxy_basic_auth` for Basic authorization. See [these docs](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config).
+* `proxy_tls_config` for TLS config. See [these docs](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config).
+
+For example:
+
+```yml
+scrape_configs:
+- job_name: foo
+  proxy_url: https://proxy-addr:1234
+  proxy_basic_auth:
+    username: foobar
+    password: secret
+  proxy_tls_config:
+    insecure_skip_verify: true
+    cert_file: /path/to/cert
+    key_file: /path/to/key
+    ca_file: /path/to/ca
+    server_name: real-server-name
+```
+
+
 ## Monitoring

 `vmagent` exports various metrics in Prometheus exposition format at `http://vmagent-host:8429/metrics` page. We recommend setting up regular scraping of this page
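The committed examples above cover `proxy_basic_auth` and `proxy_tls_config` but not the Bearer token options. A sketch of that remaining pair in the same layout; the token value and file path are hypothetical:

```yml
scrape_configs:
- job_name: foo
  proxy_url: https://proxy-addr:1234
  # Either an inline token ...
  proxy_bearer_token: secret-token
  # ... or a file containing the token (use one of the two):
  # proxy_bearer_token_file: /path/to/proxy-token
```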
@@ -10,6 +10,7 @@
 * FEATURE: export `vm_available_memory_bytes` and `vm_available_cpu_cores` metrics, which show the number of available RAM and available CPU cores for VictoriaMetrics apps.
 * FEATURE: vmagent: add ability to replicate scrape targets among `vmagent` instances in the cluster with `-promscrape.cluster.replicationFactor` command-line flag. See [these docs](https://victoriametrics.github.io/vmagent.html#scraping-big-number-of-targets).
 * FEATURE: vmagent: accept `scrape_offset` option at `scrape_config`. This option may be useful when scrapes must start at the specified offset of every scrape interval. See [these docs](https://victoriametrics.github.io/vmagent.html#troubleshooting) for details.
+* FEATURE: vmagent: support `proxy_tls_config`, `proxy_basic_auth`, `proxy_bearer_token` and `proxy_bearer_token_file` options at `scrape_config` section for configuring proxies specified via `proxy_url`. See [these docs](https://victoriametrics.github.io/vmagent.html#scraping-targets-via-a-proxy).
 * FEATURE: vmauth: allow using regexp paths in `url_map`. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1112) for details.

 * BUGFIX: vmagent: prevent high CPU usage during failing scrapes with small `scrape_timeout` (less than a few seconds).
@@ -67,7 +67,7 @@ func newClient(sw *ScrapeWork) *client {
             host += ":443"
         }
     }
-    dialFunc, err := newStatDialFunc(sw.ProxyURL, sw.AuthConfig)
+    dialFunc, err := newStatDialFunc(sw.ProxyURL, sw.ProxyAuthConfig)
     if err != nil {
         logger.Fatalf("cannot create dial func: %s", err)
     }
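The one-line change above is the heart of the commit at the transport level: the dialer that tunnels scrape traffic through the proxy now receives the proxy's own auth config instead of the target's. As a rough illustration of what such a dialer can do with that config, here is a minimal, hypothetical HTTP CONNECT dialer; `dialViaProxy` and its parameters are illustrative and are not the actual `newStatDialFunc` implementation:

```go
package example

import (
	"bufio"
	"fmt"
	"net"
	"net/http"
)

// dialViaProxy opens a TCP tunnel to targetAddr through an HTTP proxy at
// proxyAddr. When authHeader is non-empty (e.g. "Basic Zm9vOmJhcg==" or
// "Bearer xyz"), it is sent as the Proxy-Authorization header.
// Hypothetical sketch only; not the actual lib/promscrape code.
func dialViaProxy(proxyAddr, targetAddr, authHeader string) (net.Conn, error) {
	conn, err := net.Dial("tcp", proxyAddr)
	if err != nil {
		return nil, err
	}
	req := "CONNECT " + targetAddr + " HTTP/1.1\r\nHost: " + targetAddr + "\r\n"
	if authHeader != "" {
		req += "Proxy-Authorization: " + authHeader + "\r\n"
	}
	req += "\r\n"
	if _, err := conn.Write([]byte(req)); err != nil {
		conn.Close()
		return nil, err
	}
	// Read the proxy's response to the CONNECT request.
	resp, err := http.ReadResponse(bufio.NewReader(conn), nil)
	if err != nil {
		conn.Close()
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		conn.Close()
		return nil, fmt.Errorf("proxy CONNECT failed: %s", resp.Status)
	}
	return conn, nil
}
```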
@@ -110,11 +110,15 @@ type ScrapeConfig struct {
     SampleLimit int `yaml:"sample_limit,omitempty"`

     // These options are supported only by lib/promscrape.
     DisableCompression  bool          `yaml:"disable_compression,omitempty"`
     DisableKeepAlive    bool          `yaml:"disable_keepalive,omitempty"`
     StreamParse         bool          `yaml:"stream_parse,omitempty"`
     ScrapeAlignInterval time.Duration `yaml:"scrape_align_interval,omitempty"`
     ScrapeOffset        time.Duration `yaml:"scrape_offset,omitempty"`
+    ProxyTLSConfig       *promauth.TLSConfig       `yaml:"proxy_tls_config,omitempty"`
+    ProxyBasicAuth       *promauth.BasicAuthConfig `yaml:"proxy_basic_auth,omitempty"`
+    ProxyBearerToken     string                    `yaml:"proxy_bearer_token,omitempty"`
+    ProxyBearerTokenFile string                    `yaml:"proxy_bearer_token_file,omitempty"`

     // This is set in loadConfig
     swc *scrapeWorkConfig
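Each new struct field maps one-to-one to a `scrape_config` key via its yaml tag. A quick way to see the mapping is to unmarshal a snippet into a reduced copy of the struct; this sketch assumes the `gopkg.in/yaml.v2` package for parsing and is illustrative only:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// proxyOpts is a reduced, illustrative copy of the new proxy-related fields.
type proxyOpts struct {
	ProxyBearerToken     string `yaml:"proxy_bearer_token,omitempty"`
	ProxyBearerTokenFile string `yaml:"proxy_bearer_token_file,omitempty"`
}

func main() {
	data := []byte("proxy_bearer_token: xyz\n")
	var opts proxyOpts
	if err := yaml.Unmarshal(data, &opts); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", opts) // {ProxyBearerToken:xyz ProxyBearerTokenFile:}
}
```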
@@ -543,6 +547,10 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
     if err != nil {
         return nil, fmt.Errorf("cannot parse auth config for `job_name` %q: %w", jobName, err)
     }
+    proxyAC, err := promauth.NewConfig(baseDir, sc.ProxyBasicAuth, sc.ProxyBearerToken, sc.ProxyBearerTokenFile, sc.ProxyTLSConfig)
+    if err != nil {
+        return nil, fmt.Errorf("cannot parse proxy auth config for `job_name` %q: %w", jobName, err)
+    }
     relabelConfigs, err := promrelabel.ParseRelabelConfigs(sc.RelabelConfigs)
     if err != nil {
         return nil, fmt.Errorf("cannot parse `relabel_configs` for `job_name` %q: %w", jobName, err)
@@ -559,6 +567,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         scheme:          scheme,
         params:          params,
         proxyURL:        sc.ProxyURL,
+        proxyAuthConfig: proxyAC,
         authConfig:      ac,
         honorLabels:     honorLabels,
         honorTimestamps: honorTimestamps,
@@ -583,6 +592,7 @@ type scrapeWorkConfig struct {
     scheme          string
     params          map[string][]string
     proxyURL        proxy.URL
+    proxyAuthConfig *promauth.Config
     authConfig      *promauth.Config
     honorLabels     bool
     honorTimestamps bool
@@ -849,6 +859,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
         OriginalLabels:       originalLabels,
         Labels:               labels,
         ProxyURL:             swc.proxyURL,
+        ProxyAuthConfig:      swc.proxyAuthConfig,
         AuthConfig:           swc.authConfig,
         MetricRelabelConfigs: swc.metricRelabelConfigs,
         SampleLimit:          swc.sampleLimit,
@@ -10,6 +10,7 @@ import (

     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/proxy"
 )

 func TestNeedSkipScrapeWork(t *testing.T) {
@@ -154,6 +155,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "blackbox",
     }}
     if !reflect.DeepEqual(sws, swsExpected) {
@@ -548,6 +550,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
     {
@@ -587,6 +590,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
     {
@@ -626,6 +630,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -679,6 +684,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -729,6 +735,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -748,6 +755,10 @@ scrape_configs:
     p: ["x&y", "="]
     xaa:
   bearer_token: xyz
+  proxy_url: http://foo.bar
+  proxy_basic_auth:
+    username: foo
+    password: bar
   static_configs:
   - targets: ["foo.bar", "aaa"]
     labels:
@@ -801,6 +812,10 @@ scrape_configs:
         AuthConfig: &promauth.Config{
             Authorization: "Bearer xyz",
         },
+        ProxyAuthConfig: &promauth.Config{
+            Authorization: "Basic Zm9vOmJhcg==",
+        },
+        ProxyURL:        proxy.MustNewURL("http://foo.bar"),
         jobNameOriginal: "foo",
     },
     {
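The expected `Authorization` value in this test is just the standard Basic scheme: base64 of `username:password`. A quick standalone check of the `Zm9vOmJhcg==` constant (not part of the test suite):

```go
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// proxy_basic_auth with username "foo" and password "bar" should
	// produce the header value the test above expects.
	fmt.Println("Basic " + base64.StdEncoding.EncodeToString([]byte("foo:bar")))
	// Output: Basic Zm9vOmJhcg==
}
```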
@@ -842,6 +857,10 @@ scrape_configs:
         AuthConfig: &promauth.Config{
             Authorization: "Bearer xyz",
         },
+        ProxyAuthConfig: &promauth.Config{
+            Authorization: "Basic Zm9vOmJhcg==",
+        },
+        ProxyURL:        proxy.MustNewURL("http://foo.bar"),
         jobNameOriginal: "foo",
     },
     {
@@ -877,6 +896,7 @@ scrape_configs:
             TLSServerName:         "foobar",
             TLSInsecureSkipVerify: true,
         },
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "qwer",
     },
 })
@@ -955,6 +975,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -1017,6 +1038,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -1060,6 +1082,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -1099,7 +1122,8 @@ scrape_configs:
                 Value: "foo",
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         MetricRelabelConfigs: mustParseRelabelConfigs(`
 - source_labels: [foo]
   target_label: abc
@@ -1145,6 +1169,7 @@ scrape_configs:
         AuthConfig: &promauth.Config{
             Authorization: "Basic eHl6OnNlY3JldC1wYXNz",
         },
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -1184,6 +1209,7 @@ scrape_configs:
         AuthConfig: &promauth.Config{
             Authorization: "Bearer secret-pass",
         },
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -1229,6 +1255,7 @@ scrape_configs:
         AuthConfig: &promauth.Config{
             TLSCertificate: &snakeoilCert,
         },
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "foo",
     },
 })
@@ -1291,6 +1318,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "aaa",
     },
 })
@@ -1352,6 +1380,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         SampleLimit:        100,
         DisableKeepAlive:   true,
         DisableCompression: true,
@@ -1398,6 +1427,7 @@ scrape_configs:
         },
         jobNameOriginal: "path wo slash",
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
     },
 })
 }
@@ -68,12 +68,15 @@ type ScrapeWork struct {
     // See also https://prometheus.io/docs/concepts/jobs_instances/
     Labels []prompbmarshal.Label

-    // Auth config
-    AuthConfig *promauth.Config
-
     // ProxyURL HTTP proxy url
     ProxyURL proxy.URL

+    // Auth config for ProxyURL
+    ProxyAuthConfig *promauth.Config
+
+    // Auth config
+    AuthConfig *promauth.Config
+
     // Optional `metric_relabel_configs`.
     MetricRelabelConfigs *promrelabel.ParsedConfigs

@@ -105,9 +108,10 @@ type ScrapeWork struct {
 func (sw *ScrapeWork) key() string {
     // Do not take into account OriginalLabels.
     key := fmt.Sprintf("ScrapeURL=%s, ScrapeInterval=%s, ScrapeTimeout=%s, HonorLabels=%v, HonorTimestamps=%v, Labels=%s, "+
-        "AuthConfig=%s, MetricRelabelConfigs=%s, SampleLimit=%d, DisableCompression=%v, DisableKeepAlive=%v, StreamParse=%v, "+
+        "ProxyURL=%s, ProxyAuthConfig=%s, AuthConfig=%s, MetricRelabelConfigs=%s, SampleLimit=%d, DisableCompression=%v, DisableKeepAlive=%v, StreamParse=%v, "+
         "ScrapeAlignInterval=%s, ScrapeOffset=%s",
         sw.ScrapeURL, sw.ScrapeInterval, sw.ScrapeTimeout, sw.HonorLabels, sw.HonorTimestamps, sw.LabelsString(),
+        sw.ProxyURL.String(), sw.ProxyAuthConfig.String(),
         sw.AuthConfig.String(), sw.MetricRelabelConfigs.String(), sw.SampleLimit, sw.DisableCompression, sw.DisableKeepAlive, sw.StreamParse,
         sw.ScrapeAlignInterval, sw.ScrapeOffset)
     return key
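`key()` is how promscrape decides whether two scrape works describe the same target, so including `ProxyURL` and `ProxyAuthConfig` means targets that differ only in proxy settings are no longer conflated. A minimal stand-in illustrating the effect (simplified types, not the real `ScrapeWork`):

```go
package main

import "fmt"

// scrapeWork is a simplified stand-in for ScrapeWork with only the
// fields relevant to this illustration.
type scrapeWork struct {
	ScrapeURL string
	ProxyURL  string
}

// key mimics ScrapeWork.key(): a string that changes whenever any
// identity-relevant field (now including proxy settings) changes.
func (sw *scrapeWork) key() string {
	return fmt.Sprintf("ScrapeURL=%s, ProxyURL=%s", sw.ScrapeURL, sw.ProxyURL)
}

func main() {
	a := &scrapeWork{ScrapeURL: "http://host/metrics"}
	b := &scrapeWork{ScrapeURL: "http://host/metrics", ProxyURL: "http://proxy:3128"}
	// Without proxy fields in the key, these two would compare equal
	// and be treated as the same target.
	fmt.Println(a.key() == b.key()) // false
}
```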
@@ -10,6 +10,7 @@ import (
     "strings"
     "time"

+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/netutil"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/fasthttp"
@@ -20,6 +21,17 @@ type URL struct {
     url *url.URL
 }

+// MustNewURL returns new URL for the given u.
+func MustNewURL(u string) URL {
+    pu, err := url.Parse(u)
+    if err != nil {
+        logger.Panicf("BUG: cannot parse u=%q: %s", u, err)
+    }
+    return URL{
+        url: pu,
+    }
+}
+
 // URL returns the underlying url.
 func (u *URL) URL() *url.URL {
     if u == nil || u.url == nil {
@@ -28,6 +40,15 @@ func (u *URL) URL() *url.URL {
     return u.url
 }

+// String returns string representation of u.
+func (u *URL) String() string {
+    pu := u.URL()
+    if pu == nil {
+        return ""
+    }
+    return pu.String()
+}
+
 // MarshalYAML implements yaml.Marshaler interface.
 func (u *URL) MarshalYAML() (interface{}, error) {
     if u.url == nil {
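Together, the two helpers added above give the tests a concise way to build and print proxy URLs. A small usage sketch, standalone and assuming only the lib/proxy import path that appears in the test diff:

```go
package main

import (
	"fmt"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/proxy"
)

func main() {
	// MustNewURL panics on malformed input, so it is intended for
	// hard-coded URLs in tests and fixtures rather than user input.
	u := proxy.MustNewURL("http://foo.bar")
	fmt.Println(u.String()) // http://foo.bar
}
```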