package promscrape

import (
	"fmt"
	"reflect"
	"strings"
	"testing"
	"time"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promscrape/discovery/gce"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promutils"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/proxy"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/stringsutil"
)

func TestMergeLabels(t *testing.T) {
	f := func(swc *scrapeWorkConfig, target string, extraLabelsMap, metaLabelsMap map[string]string, resultExpected string) {
		t.Helper()

		extraLabels := promutils.NewLabelsFromMap(extraLabelsMap)
		metaLabels := promutils.NewLabelsFromMap(metaLabelsMap)
		labels := promutils.NewLabels(0)
		mergeLabels(labels, swc, target, extraLabels, metaLabels)
		result := labels.String()
		if result != resultExpected {
			t.Fatalf("unexpected result;\ngot\n%s\nwant\n%s", result, resultExpected)
		}
	}

	f(&scrapeWorkConfig{}, "foo", nil, nil, `{__address__="foo",__metrics_path__="",__scheme__="",__scrape_interval__="",__scrape_timeout__="",job=""}`)
	f(&scrapeWorkConfig{}, "foo", map[string]string{"foo": "bar"}, nil, `{__address__="foo",__metrics_path__="",__scheme__="",__scrape_interval__="",__scrape_timeout__="",foo="bar",job=""}`)
	f(&scrapeWorkConfig{}, "foo", map[string]string{"job": "bar"}, nil, `{__address__="foo",__metrics_path__="",__scheme__="",__scrape_interval__="",__scrape_timeout__="",job="bar"}`)
	f(&scrapeWorkConfig{
		jobName:              "xyz",
		scheme:               "https",
		metricsPath:          "/foo/bar",
		scrapeIntervalString: "15s",
		scrapeTimeoutString:  "10s",
	}, "foo", nil, nil, `{__address__="foo",__metrics_path__="/foo/bar",__scheme__="https",__scrape_interval__="15s",__scrape_timeout__="10s",job="xyz"}`)
	f(&scrapeWorkConfig{
		jobName:     "xyz",
		scheme:      "https",
		metricsPath: "/foo/bar",
	}, "foo", map[string]string{
		"job": "extra_job",
		"foo": "extra_foo",
		"a":   "xyz",
	}, map[string]string{
		"__meta_x": "y",
	}, `{__address__="foo",__meta_x="y",__metrics_path__="/foo/bar",__scheme__="https",__scrape_interval__="",__scrape_timeout__="",a="xyz",foo="extra_foo",job="extra_job"}`)
}

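// The cases above pin down label precedence in mergeLabels: labels derived
// from the scrapeWorkConfig (job, __scheme__, __metrics_path__, ...) act as
// defaults, extra labels (such as static_configs `labels`) override them, and
// meta labels like __meta_x are merged in as-is. A minimal usage sketch of
// the same unexported API, mirroring the helper above:
//
//	labels := promutils.NewLabels(0)
//	mergeLabels(labels, &scrapeWorkConfig{jobName: "xyz"}, "host:1234", nil, nil)
//	// labels now contains __address__="host:1234" and job="xyz" among its entries.
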
func TestScrapeConfigUnmarshalMarshal(t *testing.T) {
	f := func(data string) {
		t.Helper()

		var cfg Config
		data = strings.TrimSpace(data)
		if err := cfg.unmarshal([]byte(data), true); err != nil {
			t.Fatalf("parse error: %s\ndata:\n%s", err, data)
		}
		resultData := string(cfg.marshal())
		result := strings.TrimSpace(resultData)
		if result != data {
			t.Fatalf("unexpected marshaled config:\ngot\n%s\nwant\n%s", result, data)
		}
	}

	f(`
global:
  scrape_interval: 10s
`)

	f(`
scrape_config_files:
- foo
- bar
`)

	f(`
scrape_configs:
- job_name: foo
  scrape_timeout: 1.5s
  static_configs:
  - targets:
    - foo
    - bar
    labels:
      foo: bar
`)

	f(`
scrape_configs:
- job_name: foo
  honor_labels: true
  honor_timestamps: true
  scheme: https
  params:
    foo:
    - x
  authorization:
    type: foobar
  headers:
  - 'TenantID: fooBar'
  - 'X: y:z'
  relabel_configs:
  - source_labels: [abc]
  static_configs:
  - targets:
    - foo
  scrape_align_interval: 1h30m0s
  proxy_bearer_token_file: file.txt
  proxy_headers:
  - 'My-Auth-Header: top-secret'
`)
}

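// TestScrapeConfigUnmarshalMarshal above relies on Config.marshal being the
// exact inverse of Config.unmarshal on the trimmed YAML. A hedged sketch of
// the same round trip outside the test harness (error handling elided):
//
//	var cfg Config
//	_ = cfg.unmarshal([]byte("global:\n  scrape_interval: 10s"), true)
//	yamlData := string(cfg.marshal()) // equals the input after strings.TrimSpace
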
func TestGetClusterMemberNumsForScrapeWork(t *testing.T) {
	f := func(key string, membersCount, replicationFactor int, expectedMemberNums []int) {
		t.Helper()

		memberNums := getClusterMemberNumsForScrapeWork(key, membersCount, replicationFactor)
		if !reflect.DeepEqual(memberNums, expectedMemberNums) {
			t.Fatalf("unexpected memberNums; got %d; want %d", memberNums, expectedMemberNums)
		}
	}

	// Disabled clustering
	f("foo", 0, 0, []int{0})
	f("foo", 0, 0, []int{0})

	// A cluster with 2 nodes with disabled replication
	f("baz", 2, 0, []int{0})
	f("foo", 2, 0, []int{1})

	// A cluster with 2 nodes with replicationFactor=2
	f("baz", 2, 2, []int{0, 1})
	f("foo", 2, 2, []int{1, 0})

	// A cluster with 3 nodes with replicationFactor=2
	f("abc", 3, 2, []int{0, 1})
	f("bar", 3, 2, []int{1, 2})
	f("foo", 3, 2, []int{2, 0})
}

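// As the cases above show, getClusterMemberNumsForScrapeWork hashes the
// ScrapeWork key to a starting member index and then assigns
// replicationFactor consecutive member indexes, wrapping around membersCount
// (e.g. "foo" with 3 members and replicationFactor=2 lands on members 2 and
// 0). With membersCount=0 clustering is disabled and every key maps to
// member 0.
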
func TestLoadStaticConfigs(t *testing.T) {
	scs, err := loadStaticConfigs("testdata/file_sd.json")
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	if len(scs) == 0 {
		t.Fatalf("expecting non-zero static configs")
	}

	// Try loading non-existing file
	scs, err = loadStaticConfigs("testdata/non-existing-file")
	if err == nil {
		t.Fatalf("expecting non-nil error")
	}
	if scs != nil {
		t.Fatalf("unexpected non-nil static configs: %#v", scs)
	}

	// Try loading invalid file
	scs, err = loadStaticConfigs("testdata/prometheus.yml")
	if err == nil {
		t.Fatalf("expecting non-nil error")
	}
	if scs != nil {
		t.Fatalf("unexpected non-nil static configs: %#v", scs)
	}
}

func TestLoadConfig(t *testing.T) {
	cfg, err := loadConfig("testdata/prometheus.yml")
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	if cfg == nil {
		t.Fatalf("expecting non-nil config")
	}

	cfg, err = loadConfig("testdata/prometheus-with-scrape-config-files.yml")
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	if cfg == nil {
		t.Fatalf("expecting non-nil config")
	}

	// Try loading non-existing file
	cfg, err = loadConfig("testdata/non-existing-file")
	if err == nil {
		t.Fatalf("expecting non-nil error")
	}
	if cfg != nil {
		t.Fatalf("unexpected non-nil config: %#v", cfg)
	}

	// Try loading invalid file
	cfg, err = loadConfig("testdata/file_sd_1.yml")
	if err == nil {
		t.Fatalf("expecting non-nil error")
	}
	if cfg != nil {
		t.Fatalf("unexpected non-nil config: %#v", cfg)
	}
}

func TestAddressWithFullURL(t *testing.T) {
	data := `
scrape_configs:
- job_name: abc
  metrics_path: /foo/bar
  scheme: https
  params:
    x: [y]
  static_configs:
  - targets:
    # the following targets are scraped by the provided urls
    - 'http://host1/metric/path1'
    - 'https://host2/metric/path2'
    - 'http://host3:1234/metric/path3?arg1=value1'
    # the following target is scraped by <scheme>://host4:1234<metrics_path>
    - host4:1234
`
	var cfg Config
	if err := cfg.parseData([]byte(data), "sss"); err != nil {
		t.Fatalf("cannot parse data: %s", err)
	}
	sws := cfg.getStaticScrapeWork()
	swsExpected := []*ScrapeWork{
		{
			ScrapeURL:      "http://host1/metric/path1?x=y",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "host1:80",
				"job":      "abc",
			}),
			jobNameOriginal: "abc",
		},
		{
			ScrapeURL:      "https://host2/metric/path2?x=y",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "host2:443",
				"job":      "abc",
			}),
			jobNameOriginal: "abc",
		},
		{
			ScrapeURL:      "http://host3:1234/metric/path3?arg1=value1&x=y",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "host3:1234",
				"job":      "abc",
			}),
			jobNameOriginal: "abc",
		},
		{
			ScrapeURL:      "https://host4:1234/foo/bar?x=y",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "host4:1234",
				"job":      "abc",
			}),
			jobNameOriginal: "abc",
		},
	}
	checkEqualScrapeWorks(t, sws, swsExpected)
}

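// The expectations above illustrate the two target forms: a target given as a
// full URL keeps its own scheme, host, path and query args (with the
// job-level `params` appended), while a bare host:port target is expanded to
// <scheme>://<host:port><metrics_path>. The `instance` label defaults to
// host:port, with the port inferred from the scheme (80 for http, 443 for
// https) when the URL doesn't specify one.
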
func TestBlackboxExporter(t *testing.T) {
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/684
	data := `
scrape_configs:
- job_name: 'blackbox'
  metrics_path: /probe
  params:
    module: [dns_udp_example] # Look for dns response
  static_configs:
  - targets:
    - 8.8.8.8
  relabel_configs:
  - source_labels: [__address__]
    target_label: __param_target
  - source_labels: [__param_target]
    target_label: instance
  - target_label: __address__
    replacement: black:9115 # The blackbox exporter's real hostname:port.
`
	var cfg Config
	if err := cfg.parseData([]byte(data), "sss"); err != nil {
		t.Fatalf("cannot parse data: %s", err)
	}
	sws := cfg.getStaticScrapeWork()
	swsExpected := []*ScrapeWork{{
		ScrapeURL:      "http://black:9115/probe?module=dns_udp_example&target=8.8.8.8",
		ScrapeInterval: defaultScrapeInterval,
		ScrapeTimeout:  defaultScrapeTimeout,
		MaxScrapeSize:  maxScrapeSize.N,
		Labels: promutils.NewLabelsFromMap(map[string]string{
			"instance": "8.8.8.8",
			"job":      "blackbox",
		}),
		jobNameOriginal: "blackbox",
	}}
	checkEqualScrapeWorks(t, sws, swsExpected)
}

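// This is the canonical blackbox_exporter pattern: relabeling copies the
// original __address__ (8.8.8.8) into the `target` query param and the
// `instance` label, then rewrites __address__ to point at the exporter
// itself, which yields the ScrapeURL asserted above.
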
func TestGetFileSDScrapeWork(t *testing.T) {
	data := `
scrape_configs:
- job_name: foo
  file_sd_configs:
  - files: [testdata/file_sd.json]
`
	var cfg Config
	if err := cfg.parseData([]byte(data), "sss"); err != nil {
		t.Fatalf("cannot parse data: %s", err)
	}
	sws := cfg.getFileSDScrapeWork(nil)
	if !equalStaticConfigForScrapeWorks(sws, sws) {
		t.Fatalf("unexpected non-equal static configs;\nsws:\n%#v", sws)
	}

	// Load another static config
	dataNew := `
scrape_configs:
- job_name: foo
  file_sd_configs:
  - files: [testdata/file_sd_1.yml]
`
	var cfgNew Config
	if err := cfgNew.parseData([]byte(dataNew), "sss"); err != nil {
		t.Fatalf("cannot parse data: %s", err)
	}
	swsNew := cfgNew.getFileSDScrapeWork(sws)
	if equalStaticConfigForScrapeWorks(swsNew, sws) {
		t.Fatalf("unexpected equal static configs;\nswsNew:\n%#v\nsws:\n%#v", swsNew, sws)
	}

	// Try loading invalid static config
	data = `
scrape_configs:
- job_name: foo
  file_sd_configs:
  - files: [testdata/prometheus.yml]
`
	if err := cfg.parseData([]byte(data), "sss"); err != nil {
		t.Fatalf("cannot parse data: %s", err)
	}
	sws = cfg.getFileSDScrapeWork(swsNew)
	if len(sws) != 0 {
		t.Fatalf("unexpected non-empty sws:\n%#v", sws)
	}

	// Empty target in static config
	data = `
scrape_configs:
- job_name: foo
  file_sd_configs:
  - files: [testdata/empty_target_file_sd.yml]
`
	if err := cfg.parseData([]byte(data), "sss"); err != nil {
		t.Fatalf("cannot parse data: %s", err)
	}
	sws = cfg.getFileSDScrapeWork(swsNew)
	if len(sws) != 0 {
		t.Fatalf("unexpected non-empty sws:\n%#v", sws)
	}
}

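// getFileSDScrapeWork is a test helper: it parses data as a full config at
// the given path and returns the scrape works discovered via file_sd_configs.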
func getFileSDScrapeWork(data []byte, path string) ([]*ScrapeWork, error) {
	var cfg Config
	if err := cfg.parseData(data, path); err != nil {
		return nil, fmt.Errorf("cannot parse data: %w", err)
	}
	return cfg.getFileSDScrapeWork(nil), nil
}

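// getStaticScrapeWork is a test helper: it parses data as a full config at
// the given path and returns the scrape works built from static_configs.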
func getStaticScrapeWork(data []byte, path string) ([]*ScrapeWork, error) {
	var cfg Config
	if err := cfg.parseData(data, path); err != nil {
		return nil, fmt.Errorf("cannot parse data: %w", err)
	}
	return cfg.getStaticScrapeWork(), nil
}

func TestGetStaticScrapeWorkFailure(t *testing.T) {
	f := func(data string) {
		t.Helper()

		sws, err := getStaticScrapeWork([]byte(data), "non-existing-file")
		if err == nil {
			t.Fatalf("expecting non-nil error")
		}
		if sws != nil {
			t.Fatalf("expecting nil sws")
		}
	}

	// incorrect yaml
	f(`foo bar baz`)

	// yaml with unsupported fields
	f(`foo: bar`)
	f(`
scrape_configs:
- foo: bar
`)

	// invalid scrape_config_files contents
	f(`
scrape_config_files:
- job_name: aa
  static_configs:
  - targets: ["s"]
`)

	// Duplicate job_name
	f(`
scrape_configs:
- job_name: foo
  static_configs:
    targets: ["foo"]
- job_name: foo
  static_configs:
    targets: ["bar"]
`)
}

// String returns human-readable representation for sw.
func (sw *ScrapeWork) String() string {
	return stringsutil.JSONString(sw.key())
}

func TestGetFileSDScrapeWorkSuccess(t *testing.T) {
	f := func(data string, expectedSws []*ScrapeWork) {
		t.Helper()

		sws, err := getFileSDScrapeWork([]byte(data), "non-existing-file")
		if err != nil {
			t.Fatalf("unexpected error: %s", err)
		}
		checkEqualScrapeWorks(t, sws, expectedSws)
	}

	f(`
scrape_configs:
- job_name: foo
  static_configs:
  - targets: ["xxx"]
`, []*ScrapeWork{})

	f(`
scrape_configs:
- job_name: foo
  metrics_path: /abc/de
  file_sd_configs:
  - files: ["testdata/file_sd.json", "testdata/file_sd*.yml"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://host1/abc/de",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "host1:80",
				"job":      "foo",
				"qwe":      "rty",
			}),
			jobNameOriginal: "foo",
		},
		{
			ScrapeURL:      "http://host2/abc/de",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "host2:80",
				"job":      "foo",
				"qwe":      "rty",
			}),
			jobNameOriginal: "foo",
		},
		{
			ScrapeURL:      "http://localhost:9090/abc/de",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "localhost:9090",
				"job":      "foo",
				"yml":      "test",
			}),
			jobNameOriginal: "foo",
		},
	})
}

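// Note for the cases above: entries in file_sd_configs `files` may be plain
// paths or glob patterns ("testdata/file_sd*.yml"), static_configs targets
// never produce file-SD scrape works (hence the empty first expectation), and
// labels attached to a target in the discovered file (qwe=rty, yml=test)
// survive into the final ScrapeWork labels.
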
func TestGetStaticScrapeWorkSuccess(t *testing.T) {
	f := func(data string, expectedSws []*ScrapeWork) {
		t.Helper()

		sws, err := getStaticScrapeWork([]byte(data), "non-existing-file")
		if err != nil {
			t.Fatalf("unexpected error: %s", err)
		}
		checkEqualScrapeWorks(t, sws, expectedSws)
	}
	f(``, nil)

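	// The cases below feed in configs that parse as YAML but contain invalid
	// scrape_configs entries; such jobs are expected to be skipped (yielding
	// an empty ScrapeWork list) rather than failing the whole config.
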
	// Scrape config with missing modulus for action=hashmod in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - action: hashmod
    source_labels: [foo]
    target_label: bar
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with invalid action in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - action: foobar
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with missing source_labels for action=keep in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - action: keep
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with missing source_labels for action=drop in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - action: drop
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with missing source_labels for action=hashmod in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - action: hashmod
    target_label: bar
    modulus: 123
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with missing target for action=hashmod in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - action: hashmod
    source_labels: [foo]
    modulus: 123
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with invalid regex in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - regex: "("
    source_labels: [foo]
    target_label: bar
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with missing target_label for action=replace in relabel_configs must be skipped
	f(`
scrape_configs:
- job_name: aa
  relabel_configs:
  - action: replace
    source_labels: [foo]
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{})

	// Scrape config with both `authorization` and `bearer_token` set must be skipped
	f(`
scrape_configs:
- job_name: x
  authorization:
    credentials: foobar
  bearer_token: foo
  static_configs:
  - targets: ["a"]
`, []*ScrapeWork{})

	// Scrape config with both `bearer_token` and `bearer_token_file` set must be skipped
	f(`
scrape_configs:
- job_name: x
  bearer_token: foo
  bearer_token_file: bar
  static_configs:
  - targets: ["a"]
`, []*ScrapeWork{})

	// Scrape config with both `basic_auth` and `bearer_token` set must be skipped
	f(`
scrape_configs:
- job_name: x
  bearer_token: foo
  basic_auth:
    username: foo
    password: bar
  static_configs:
  - targets: ["a"]
`, []*ScrapeWork{})

	// Scrape config with both `authorization` and `basic_auth` set must be skipped
	f(`
scrape_configs:
- job_name: x
  authorization:
    credentials: foobar
  basic_auth:
    username: foobar
  static_configs:
  - targets: ["a"]
`, []*ScrapeWork{})

	// Scrape config with invalid scheme must be skipped
	f(`
scrape_configs:
- job_name: x
  scheme: asdf
  static_configs:
  - targets: ["foo"]
`, []*ScrapeWork{})

	// Scrape config with missing job_name must be skipped
	f(`
scrape_configs:
- static_configs:
  - targets: ["foo"]
`, []*ScrapeWork{})

	// Scrape config with missing username in `basic_auth` must be skipped
	f(`
scrape_configs:
- job_name: x
  basic_auth:
    password: sss
  static_configs:
  - targets: ["a"]
`, []*ScrapeWork{})

	// Scrape config with both password and password_file set in `basic_auth` must be skipped
	f(`
scrape_configs:
- job_name: x
  basic_auth:
    username: foobar
    password: sss
    password_file: sdfdf
  static_configs:
  - targets: ["a"]
`, []*ScrapeWork{})

	// Scrape config with invalid ca_file must be properly parsed, since ca_file may become valid later
	f(`
scrape_configs:
- job_name: aa
  tls_config:
    ca_file: testdata/prometheus.yml
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://s/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "s:80",
				"job":      "aa",
			}),
			jobNameOriginal: "aa",
		},
	})

	// Scrape config with non-existing ca_file must be properly parsed, since the ca_file can become valid later
	f(`
scrape_configs:
- job_name: aa
  tls_config:
    ca_file: non/existing/file
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://s/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "s:80",
				"job":      "aa",
			}),
			jobNameOriginal: "aa",
		},
	})

	// Scrape config with non-existing cert_file must be properly parsed, since the cert_file can become valid later
	f(`
scrape_configs:
- job_name: aa
  tls_config:
    cert_file: non/existing/file
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://s/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "s:80",
				"job":      "aa",
			}),
			jobNameOriginal: "aa",
		},
	})

	// Scrape config with non-existing key_file must be properly parsed, since the key_file can become valid later
	f(`
scrape_configs:
- job_name: aa
  tls_config:
    key_file: non/existing/file
  static_configs:
  - targets: ["s"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://s/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "s:80",
				"job":      "aa",
			}),
			jobNameOriginal: "aa",
		},
	})

	f(`
scrape_configs:
- job_name: foo
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "foo",
			}),
			jobNameOriginal: "foo",
		},
	})

	f(`
global:
  external_labels:
    datacenter: foobar
    jobs: xxx
scrape_configs:
- job_name: foo
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "foo",
			}),
			ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
				"datacenter": "foobar",
				"jobs":       "xxx",
			}),
			jobNameOriginal: "foo",
		},
	})

	f(`
global:
  scrape_interval: 8s
  scrape_timeout: 34s
scrape_configs:
- job_name: foo
  scrape_interval: 54s
  scrape_timeout: 12s
  metrics_path: /foo/bar
  scheme: https
  honor_labels: true
  honor_timestamps: true
  follow_redirects: false
  params:
    p: ["x&y", "="]
    xaa:
  proxy_url: http://foo.bar
  static_configs:
  - targets: ["foo.bar", "aaa"]
    labels:
      x: y
      __scrape_timeout__: "5s"
- job_name: qwer
  tls_config:
    server_name: foobar
    insecure_skip_verify: true
  static_configs:
  - targets: [1.2.3.4]
- job_name: asdf
  static_configs:
  - targets: [foobar]
`, []*ScrapeWork{
		{
			ScrapeURL:       "https://foo.bar/foo/bar?p=x%26y&p=%3D",
			ScrapeInterval:  54 * time.Second,
			ScrapeTimeout:   5 * time.Second,
			MaxScrapeSize:   maxScrapeSize.N,
			HonorLabels:     true,
			HonorTimestamps: true,
			DenyRedirects:   true,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:443",
				"job":      "foo",
				"x":        "y",
			}),
			ProxyURL:        proxy.MustNewURL("http://foo.bar"),
			jobNameOriginal: "foo",
		},
		{
			ScrapeURL:       "https://aaa/foo/bar?p=x%26y&p=%3D",
			ScrapeInterval:  54 * time.Second,
			ScrapeTimeout:   5 * time.Second,
			MaxScrapeSize:   maxScrapeSize.N,
			HonorLabels:     true,
			HonorTimestamps: true,
			DenyRedirects:   true,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "aaa:443",
				"job":      "foo",
				"x":        "y",
			}),
			ProxyURL:        proxy.MustNewURL("http://foo.bar"),
			jobNameOriginal: "foo",
		},
		{
			ScrapeURL:      "http://1.2.3.4/metrics",
			ScrapeInterval: 8 * time.Second,
			ScrapeTimeout:  8 * time.Second,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "1.2.3.4:80",
				"job":      "qwer",
			}),
			jobNameOriginal: "qwer",
		},
		{
			ScrapeURL:      "http://foobar/metrics",
			ScrapeInterval: 8 * time.Second,
			ScrapeTimeout:  8 * time.Second,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foobar:80",
				"job":      "asdf",
			}),
			jobNameOriginal: "asdf",
		},
	})

	f(`
scrape_configs:
- job_name: foo
  relabel_configs:
  - source_labels: [__scheme__, __address__]
    separator: "://"
    target_label: __tmp_url
  - source_labels: [__tmp_url, __metrics_path__]
    separator: ""
    target_label: url
  - action: labeldrop
    regex: "job|__tmp_.+"
  - action: drop
    source_labels: [__address__]
    regex: "drop-.*"
  - action: keep
    source_labels: [__param_x]
    regex: keep_me
  - action: labelkeep
    regex: "__.*|url"
  - action: labelmap
    regex: "(url)"
    replacement: "prefix:${1}"
  - action: hashmod
    modulus: 123
    source_labels: [__address__]
    target_label: hash
  - action: replace
    source_labels: [__address__]
    target_label: foobar
    replacement: ""
  params:
    x: [keep_me]
  static_configs:
  - targets: ["foo.bar:1234", "drop-this-target"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metrics?x=keep_me",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"hash":       "82",
				"instance":   "foo.bar:1234",
				"prefix:url": "http://foo.bar:1234/metrics",
				"url":        "http://foo.bar:1234/metrics",
			}),
			jobNameOriginal: "foo",
		},
	})

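	// In the pipeline above, hashmod computes hash("foo.bar:1234") % 123 = 82,
	// labelmap duplicates `url` into `prefix:url`, and the drop rule removes
	// the "drop-this-target" target, leaving a single expected ScrapeWork.
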
	f(`
scrape_configs:
- job_name: foo
  scheme: https
  relabel_configs:
  - action: replace
    source_labels: [non-existing-label]
    target_label: instance
    replacement: fake.addr
  - action: replace
    source_labels: [__address__]
    target_label: foobar
    regex: "missing-regex"
    replacement: aaabbb
  - action: replace
    source_labels: [__scheme__]
    target_label: job
  - action: replace
    source_labels: [__scheme__]
    target_label: __scheme__
    replacement: mailto
  - target_label: __metrics_path__
    replacement: /abc.de
  - target_label: __param_a
    replacement: b
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "mailto://foo.bar:1234/abc.de?a=b",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "fake.addr",
				"job":      "https",
			}),
			jobNameOriginal: "foo",
		},
	})

	f(`
scrape_configs:
- job_name: foo
  scheme: https
  max_scrape_size: 1
  relabel_configs:
  - action: keep
    source_labels: [__address__]
    regex: "foo\\.bar:.*"
  - action: hashmod
    source_labels: [job]
    modulus: 4
    target_label: job
  - action: labeldrop
    regex: "non-matching-regex"
  - action: labelkeep
    regex: "job|__address__"
  - action: labeldrop
    regex: ""
  static_configs:
  - targets: ["foo.bar:1234", "xyz"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  1,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "3",
			}),
			jobNameOriginal: "foo",
		},
	})

	f(`
scrape_configs:
- job_name: foo
  max_scrape_size: 8MiB
  metric_relabel_configs:
  - source_labels: [foo]
    target_label: abc
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  8 * 1024 * 1024,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "foo",
			}),
			jobNameOriginal: "foo",
		},
	})

	f(`
scrape_configs:
- job_name: foo
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "foo",
			}),
			jobNameOriginal: "foo",
		},
	})

	f(`
scrape_configs:
- job_name: foo
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metrics",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "foo",
			}),
			jobNameOriginal: "foo",
		},
	})

	f(`
global:
  external_labels:
    job: foobar
    foo: xx
    q: qwe
    __address__: aaasdf
    __param_a: jlfd
scrape_configs:
- job_name: aaa
  params:
    a: [b, xy]
  static_configs:
  - targets: ["a"]
    labels:
      foo: bar
      __param_a: c
      __address__: pp
      job: yyy
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://pp/metrics?a=c&a=xy",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"foo":      "bar",
				"instance": "pp:80",
				"job":      "yyy",
			}),
			ExternalLabels: promutils.NewLabelsFromMap(map[string]string{
				"__address__": "aaasdf",
				"__param_a":   "jlfd",
				"foo":         "xx",
				"job":         "foobar",
				"q":           "qwe",
			}),
			jobNameOriginal: "aaa",
		},
	})

	f(`
scrape_configs:
- job_name: 'snmp'
  sample_limit: 100
  disable_keepalive: true
  disable_compression: true
  headers:
  - "My-Auth: foo-Bar"
  proxy_headers:
  - "Foo: bar"
  scrape_align_interval: 1s
  scrape_offset: 0.5s
  static_configs:
  - targets:
    - 192.168.1.2 # SNMP device.
  metrics_path: /snmp
  params:
    module: [if_mib]
  relabel_configs:
  - source_labels: [__address__]
    target_label: __param_target
  - source_labels: [__param_target]
    target_label: instance
  - target_label: __address__
    replacement: 127.0.0.1:9116 # The SNMP exporter's real hostname:port.
  - target_label: __series_limit__
    replacement: 1234
  - target_label: __sample_limit__
    replacement: 5678
  - target_label: __stream_parse__
    replacement: true
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://127.0.0.1:9116/snmp?module=if_mib&target=192.168.1.2",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "192.168.1.2",
				"job":      "snmp",
			}),
			SampleLimit:         5678,
			DisableKeepAlive:    true,
			DisableCompression:  true,
			StreamParse:         true,
			ScrapeAlignInterval: time.Second,
			ScrapeOffset:        500 * time.Millisecond,
			SeriesLimit:         1234,
			jobNameOriginal:     "snmp",
		},
	})

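	// The __series_limit__, __sample_limit__ and __stream_parse__ labels set
	// via relabeling above act as per-target overrides: they surface as the
	// SeriesLimit, SampleLimit and StreamParse fields of the resulting
	// ScrapeWork instead of remaining ordinary labels.
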
	f(`
scrape_configs:
- job_name: path wo slash
  enable_compression: false
  static_configs:
  - targets: ["foo.bar:1234"]
  relabel_configs:
  - replacement: metricspath
    target_label: __metrics_path__
`, []*ScrapeWork{
		{
			ScrapeURL:      "http://foo.bar:1234/metricspath",
			ScrapeInterval: defaultScrapeInterval,
			ScrapeTimeout:  defaultScrapeTimeout,
			MaxScrapeSize:  maxScrapeSize.N,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "path wo slash",
			}),
			DisableCompression: true,
			jobNameOriginal:    "path wo slash",
		},
	})

	f(`
global:
  scrape_timeout: 1d
scrape_configs:
- job_name: foo
  scrape_interval: 1w
  scrape_align_interval: 1d
  scrape_offset: 2d
  no_stale_markers: true
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:           "http://foo.bar:1234/metrics",
			ScrapeInterval:      time.Hour * 24 * 7,
			ScrapeTimeout:       time.Hour * 24,
			ScrapeAlignInterval: time.Hour * 24,
			ScrapeOffset:        time.Hour * 24 * 2,
			MaxScrapeSize:       maxScrapeSize.N,
			NoStaleMarkers:      true,
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "foo",
			}),
			jobNameOriginal: "foo",
		},
	})

	defaultSeriesLimitPerTarget := *seriesLimitPerTarget
	*seriesLimitPerTarget = 1e3
	f(`
scrape_configs:
- job_name: foo
  series_limit: 0
  static_configs:
  - targets: ["foo.bar:1234"]
`, []*ScrapeWork{
		{
			ScrapeURL:       "http://foo.bar:1234/metrics",
			ScrapeInterval:  defaultScrapeInterval,
			ScrapeTimeout:   defaultScrapeTimeout,
			MaxScrapeSize:   maxScrapeSize.N,
			jobNameOriginal: "foo",
			Labels: promutils.NewLabelsFromMap(map[string]string{
				"instance": "foo.bar:1234",
				"job":      "foo",
			}),
			SeriesLimit: 0,
		},
	})
	*seriesLimitPerTarget = defaultSeriesLimitPerTarget
}

func equalStaticConfigForScrapeWorks(a, b []*ScrapeWork) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		keyA := a[i].key()
		keyB := b[i].key()
		if keyA != keyB {
			return false
		}
	}
	return true
}

func TestScrapeConfigClone(t *testing.T) {
	f := func(sc *ScrapeConfig) {
		t.Helper()
		scCopy := sc.clone()
		scJSON := sc.marshalJSON()
		scCopyJSON := scCopy.marshalJSON()
		if !reflect.DeepEqual(scJSON, scCopyJSON) {
			t.Fatalf("unexpected cloned result:\ngot\n%s\nwant\n%s", scCopyJSON, scJSON)
		}
	}

	f(&ScrapeConfig{})

	var ie promrelabel.IfExpression
	if err := ie.Parse(`{foo=~"bar",baz!="z"}`); err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	f(&ScrapeConfig{
		JobName:        "foo",
		ScrapeInterval: promutils.NewDuration(time.Second * 47),
		HonorLabels:    true,
		Params: map[string][]string{
			"foo": {"bar", "baz"},
		},
		HTTPClientConfig: promauth.HTTPClientConfig{
			Authorization: &promauth.Authorization{
				Credentials: promauth.NewSecret("foo"),
			},
			BasicAuth: &promauth.BasicAuthConfig{
				Username: "user_x",
				Password: promauth.NewSecret("pass_x"),
			},
			BearerToken: promauth.NewSecret("zx"),
			OAuth2: &promauth.OAuth2Config{
				ClientSecret: promauth.NewSecret("aa"),
				Scopes:       []string{"foo", "bar"},
				TLSConfig: &promauth.TLSConfig{
					CertFile: "foo",
				},
			},
			TLSConfig: &promauth.TLSConfig{
				KeyFile: "aaa",
			},
		},
		ProxyURL: proxy.MustNewURL("https://foo.bar:3434/assdf/dsfd?sdf=dsf"),
		RelabelConfigs: []promrelabel.RelabelConfig{{
			SourceLabels: []string{"foo", "aaa"},
			Regex: &promrelabel.MultiLineRegex{
				S: "foo\nbar",
			},
			If: &ie,
		}},
		SampleLimit: 10,
		GCESDConfigs: []gce.SDConfig{{
			Project: "foo",
			Zone: gce.ZoneYAML{
				Zones: []string{"a", "b"},
			},
		}},
		StreamParse: true,
		ProxyClientConfig: promauth.ProxyClientConfig{
			BearerTokenFile: "foo",
		},
	})
}

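// checkEqualScrapeWorks compares ScrapeWork lists field-by-field, zeroing the
// fields that hold internal state (auth configs, original labels, compiled
// relabel configs) so the comparison covers only the externally visible
// configuration.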
func checkEqualScrapeWorks(t *testing.T, got, want []*ScrapeWork) {
	t.Helper()

	if len(got) != len(want) {
		t.Fatalf("unexpected number of ScrapeWork items; got %d; want %d", len(got), len(want))
	}
	for i := range got {
		gotItem := *got[i]
		wantItem := want[i]

		// Zero fields with internal state before comparing the items.
		gotItem.ProxyAuthConfig = nil
		gotItem.AuthConfig = nil
		gotItem.OriginalLabels = nil
		gotItem.RelabelConfigs = nil
		gotItem.MetricRelabelConfigs = nil

		if !reflect.DeepEqual(&gotItem, wantItem) {
			t.Fatalf("unexpected scrapeWork at position %d out of %d;\ngot\n%#v\nwant\n%#v", i, len(got), &gotItem, wantItem)
		}
	}
}