1. Drop jobs with an invalid config, logging an error instead of exiting.
2. Only read oauth2's client_secret_file when performing a scrape.

This commit is contained in:
parent bea3431ed1
commit 7d9a702968
5 changed files with 188 additions and 118 deletions
@@ -48,6 +48,7 @@ [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4825) and [these
 * BUGFIX: [Official Grafana dashboards for VictoriaMetrics](https://grafana.com/orgs/victoriametrics): fix display of the ingested rows rate on the `Samples ingested/s` and `Samples rate` panels of vmagent's dashboard. Previously, not all ingested protocols were accounted for in these panels. An extra panel `Rows rate` was added to the `Ingestion` section to show the ingested rows rate split by protocol.
 * BUGFIX: [vmui](https://docs.victoriametrics.com/#vmui): fix the bug causing render looping when switching to heatmap.
 * BUGFIX: [VictoriaMetrics enterprise](https://docs.victoriametrics.com/enterprise.html): validate that the `-dedup.minScrapeInterval` value and the `-downsampling.period` intervals are multiples of each other. See [these docs](https://docs.victoriametrics.com/#downsampling).
+* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): only log errors for scrape jobs with an incorrect configuration instead of exiting; previously vmagent would quit directly. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/4959).
 
 ## [v1.93.5](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.93.5)
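The enterprise entry above describes an interval-compatibility check. As a rough illustration, the rule reduces to exact divisibility between the two durations; the following is a minimal sketch under that assumption (`validateMultiples` is a hypothetical helper, not the enterprise implementation):

```go
package main

import (
	"fmt"
	"time"
)

// validateMultiples is a hypothetical sketch of the check described in the
// changelog entry above: two intervals are compatible when the larger one
// is an exact multiple of the smaller one.
func validateMultiples(dedupInterval, downsamplingPeriod time.Duration) error {
	if dedupInterval <= 0 || downsamplingPeriod <= 0 {
		return fmt.Errorf("intervals must be positive")
	}
	lo, hi := dedupInterval, downsamplingPeriod
	if lo > hi {
		lo, hi = hi, lo
	}
	if hi%lo != 0 {
		return fmt.Errorf("%s and %s must be multiples of each other", dedupInterval, downsamplingPeriod)
	}
	return nil
}

func main() {
	fmt.Println(validateMultiples(time.Minute, 5*time.Minute))  // <nil>
	fmt.Println(validateMultiples(time.Minute, 90*time.Second)) // error
}
```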
@@ -208,11 +208,6 @@ func newOAuth2ConfigInternal(baseDir string, o *OAuth2Config) (*oauth2ConfigInternal, error) {
 	}
 	if o.ClientSecretFile != "" {
 		oi.clientSecretFile = fs.GetFilepath(baseDir, o.ClientSecretFile)
-		secret, err := readPasswordFromFile(oi.clientSecretFile)
-		if err != nil {
-			return nil, fmt.Errorf("cannot read OAuth2 secret from %q: %w", oi.clientSecretFile, err)
-		}
-		oi.cfg.ClientSecret = secret
 	}
 	opts := &Options{
 		BaseDir: baseDir,
@@ -660,6 +655,14 @@ func (actx *authContext) initFromOAuth2Config(baseDir string, o *OAuth2Config) error {
 		return err
 	}
 	actx.getAuthHeader = func() string {
+		if oi.clientSecretFile != "" {
+			secret, err := readPasswordFromFile(oi.clientSecretFile)
+			if err != nil {
+				logger.Errorf("cannot read OAuth2 secret from %q: %s", oi.clientSecretFile, err)
+				return ""
+			}
+			oi.cfg.ClientSecret = secret
+		}
 		ts, err := oi.getTokenSource()
 		if err != nil {
 			logger.Errorf("cannot get OAuth2 tokenSource: %s", err)
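This is the core of change 2: the client secret file is now read on every auth-header construction instead of once at config-load time, so config parsing no longer fails on a missing file, and rotated secrets are picked up without a restart. A minimal standalone sketch of the same lazy-read pattern (`lazySecret` and the path are hypothetical, not the VictoriaMetrics code):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// lazySecret defers reading a secret file until the secret is actually
// needed, instead of caching it once at config-load time. A missing file
// therefore fails only the requests that need the secret, and a rotated
// file is picked up on the next read without a restart.
type lazySecret struct {
	path string
}

func (ls *lazySecret) Get() (string, error) {
	data, err := os.ReadFile(ls.path)
	if err != nil {
		return "", fmt.Errorf("cannot read secret from %q: %w", ls.path, err)
	}
	return strings.TrimSpace(string(data)), nil
}

func main() {
	// Hypothetical path; config parsing would succeed even if it is absent.
	ls := &lazySecret{path: "/etc/app/client_secret"}
	if secret, err := ls.Get(); err != nil {
		fmt.Println("deferred failure at request time:", err)
	} else {
		fmt.Println("loaded secret of length", len(secret))
	}
}
```

Re-reading on each call trades a little file I/O per token refresh for secret rotation without restarting the agent.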
@@ -37,13 +37,24 @@ func TestNewConfig(t *testing.T) {
 			},
 			expectHeader: "Bearer some-token",
 		},
+		{
+			name: "OAuth2 lazy load non-existing-file",
+			opts: Options{
+				OAuth2: &OAuth2Config{
+					ClientID:         "some-id",
+					ClientSecretFile: "non-existing-file",
+					TokenURL:         "http://localhost:8511",
+				},
+			},
+			wantErr: false,
+		},
 		{
 			name: "OAuth2 want err",
 			opts: Options{
 				OAuth2: &OAuth2Config{
 					ClientID:         "some-id",
 					ClientSecret:     NewSecret("some-secret"),
-					ClientSecretFile: "testdata/test_secretfile.txt",
+					ClientSecretFile: "testdata/non-existing-file",
 					TokenURL:         "http://localhost:8511",
 				},
 			},
 		},
@@ -96,6 +107,17 @@ func TestNewConfig(t *testing.T) {
 			},
 			expectHeader: "Bearer some-token",
 		},
+		{
+			name: "tls config with non-existing file",
+			opts: Options{
+				BearerToken: "some-token",
+				TLSConfig: &TLSConfig{
+					InsecureSkipVerify: true,
+					CertFile:           "non-existing-file",
+				},
+			},
+			wantErr: true,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
@@ -480,6 +480,8 @@ func (cfg *Config) parseData(data []byte, path string) ([]byte, error) {
 	}
 
 	// Initialize cfg.ScrapeConfigs
+	// Drop jobs with an invalid config, logging an error for each dropped job.
+	var validScrapeConfigs []*ScrapeConfig
 	for i, sc := range cfg.ScrapeConfigs {
 		// Make a copy of sc in order to remove references to `data` memory.
 		// This should prevent memory leaks on config reload.
@@ -488,10 +490,13 @@ func (cfg *Config) parseData(data []byte, path string) ([]byte, error) {
 
 		swc, err := getScrapeWorkConfig(sc, cfg.baseDir, &cfg.Global)
 		if err != nil {
-			return nil, fmt.Errorf("cannot parse `scrape_config`: %w", err)
+			logger.Errorf("cannot parse `scrape_config`: %s", err)
+			continue
 		}
 		sc.swc = swc
+		validScrapeConfigs = append(validScrapeConfigs, sc)
 	}
+	cfg.ScrapeConfigs = validScrapeConfigs
 	return dataNew, nil
 }
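This is the core of change 1: a `scrape_config` that fails validation is now logged and skipped rather than aborting the whole config parse. A minimal sketch of the same validate-and-filter pattern (`jobConfig` and `validate` are hypothetical stand-ins for vmagent's real types and checks):

```go
package main

import (
	"fmt"
	"log"
)

// jobConfig is a stand-in for a scrape job definition.
type jobConfig struct {
	Name   string
	Scheme string
}

// validate is a hypothetical checker; the real validation is far richer.
func validate(jc *jobConfig) error {
	if jc.Scheme != "http" && jc.Scheme != "https" {
		return fmt.Errorf("unsupported scheme %q", jc.Scheme)
	}
	return nil
}

func main() {
	jobs := []*jobConfig{
		{Name: "good", Scheme: "https"},
		{Name: "bad", Scheme: "asdf"},
	}
	// Keep the valid jobs; log and drop the rest instead of failing startup.
	valid := jobs[:0]
	for _, jc := range jobs {
		if err := validate(jc); err != nil {
			log.Printf("skipping job %q: %s", jc.Name, err)
			continue
		}
		valid = append(valid, jc)
	}
	fmt.Println("running with", len(valid), "job(s)")
}
```

One bad job no longer prevents the remaining jobs from running.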
@@ -447,59 +447,62 @@ func getStaticScrapeWork(data []byte, path string) ([]*ScrapeWork, error) {
 }
 
 func TestGetStaticScrapeWorkFailure(t *testing.T) {
-    f := func(data string) {
-        t.Helper()
-        sws, err := getStaticScrapeWork([]byte(data), "non-existing-file")
-        if err == nil {
-            t.Fatalf("expecting non-nil error")
-        }
-        if sws != nil {
-            t.Fatalf("expecting nil sws")
-        }
-    }
-
-    // incorrect yaml
-    f(`foo bar baz`)
-
-    // Missing job_name
-    f(`
+    tests := []struct {
+        name      string
+        data      string
+        expectSws []*ScrapeWork
+        wantErr   bool
+    }{
+        {
+            name:    "Incorrect yaml",
+            data:    `foo bar baz`,
+            wantErr: true,
+        },
+        {
+            name: "Missing job_name",
+            data: `
 scrape_configs:
 - static_configs:
   - targets: ["foo"]
-`)
-
-    // Duplicate job_name
-    f(`
+`,
+        },
+        {
+            name: "Duplicate job_name",
+            data: `
 scrape_configs:
 - job_name: foo
   static_configs:
-  targets: ["foo"]
+  - targets: ["foo"]
 - job_name: foo
   static_configs:
-  targets: ["bar"]
-`)
-
-    // Invalid scheme
-    f(`
+  - targets: ["bar"]
+`,
+            wantErr: true,
+        },
+        {
+            name: "Invalid scheme",
+            data: `
 scrape_configs:
 - job_name: x
   scheme: asdf
   static_configs:
   - targets: ["foo"]
-`)
-
-    // Missing username in `basic_auth`
-    f(`
+`,
+        },
+        {
+            name: "Missing username in `basic_auth`",
+            data: `
 scrape_configs:
 - job_name: x
   basic_auth:
     password: sss
   static_configs:
   - targets: ["a"]
-`)
-
-    // Both password and password_file set in `basic_auth`
-    f(`
+`,
+        },
+        {
+            name: "Both password and password_file set in `basic_auth`",
+            data: `
 scrape_configs:
 - job_name: x
   basic_auth:
@@ -508,10 +511,11 @@ scrape_configs:
     password_file: sdfdf
   static_configs:
   - targets: ["a"]
-`)
-
-    // Invalid password_file set in `basic_auth`
-    f(`
+`,
+        },
+        {
+            name: "Invalid password_file set in `basic_auth`",
+            data: `
 scrape_configs:
 - job_name: x
   basic_auth:
@@ -519,20 +523,23 @@ scrape_configs:
     password_file: ['foobar']
   static_configs:
   - targets: ["a"]
-`)
-
-    // Both `bearer_token` and `bearer_token_file` are set
-    f(`
+`,
+            wantErr: true,
+        },
+        {
+            name: "Both `bearer_token` and `bearer_token_file` are set",
+            data: `
 scrape_configs:
 - job_name: x
   bearer_token: foo
   bearer_token_file: bar
   static_configs:
   - targets: ["a"]
-`)
-
-    // Both `basic_auth` and `bearer_token` are set
-    f(`
+`,
+        },
+        {
+            name: "Both `basic_auth` and `bearer_token` are set",
+            data: `
 scrape_configs:
 - job_name: x
   bearer_token: foo
@@ -541,10 +548,11 @@ scrape_configs:
     password: bar
   static_configs:
   - targets: ["a"]
-`)
-
-    // Both `authorization` and `basic_auth` are set
-    f(`
+`,
+        },
+        {
+            name: "Both `authorization` and `basic_auth` are set",
+            data: `
 scrape_configs:
 - job_name: x
   authorization:
@@ -553,10 +561,11 @@ scrape_configs:
     username: foobar
   static_configs:
   - targets: ["a"]
-`)
-
-    // Both `authorization` and `bearer_token` are set
-    f(`
+`,
+        },
+        {
+            name: "Both `authorization` and `bearer_token` are set",
+            data: `
 scrape_configs:
 - job_name: x
   authorization:
@@ -564,59 +573,66 @@ scrape_configs:
   bearer_token: foo
   static_configs:
   - targets: ["a"]
-`)
-
-    // Invalid `bearer_token_file`
-    f(`
+`,
+        },
+        {
+            name: "Invalid `bearer_token_file`",
+            data: `
 scrape_configs:
 - job_name: x
   bearer_token_file: [foobar]
   static_configs:
   - targets: ["a"]
-`)
-
-    // non-existing ca_file
-    f(`
+`,
+            wantErr: true,
+        },
+        {
+            name: "non-existing ca_file",
+            data: `
 scrape_configs:
 - job_name: aa
   tls_config:
     ca_file: non/extising/file
   static_configs:
   - targets: ["s"]
-`)
-
-    // invalid ca_file
-    f(`
+`,
+        },
+        {
+            name: "invalid ca_file",
+            data: `
 scrape_configs:
 - job_name: aa
   tls_config:
     ca_file: testdata/prometheus.yml
   static_configs:
   - targets: ["s"]
-`)
-
-    // non-existing cert_file
-    f(`
+`,
+        },
+        {
+            name: "non-existing cert_file",
+            data: `
 scrape_configs:
 - job_name: aa
   tls_config:
     cert_file: non/extising/file
   static_configs:
   - targets: ["s"]
-`)
-
-    // non-existing key_file
-    f(`
+`,
+        },
+        {
+            name: "non-existing key_file",
+            data: `
 scrape_configs:
 - job_name: aa
   tls_config:
     key_file: non/extising/file
   static_configs:
   - targets: ["s"]
-`)
-
-    // Invalid regex in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Invalid regex in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
@@ -625,10 +641,11 @@ scrape_configs:
     target_label: bar
   static_configs:
   - targets: ["s"]
-`)
-
-    // Missing target_label for action=replace in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Missing target_label for action=replace in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
@@ -636,30 +653,33 @@ scrape_configs:
     source_labels: [foo]
   static_configs:
   - targets: ["s"]
-`)
-
-    // Missing source_labels for action=keep in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Missing source_labels for action=keep in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
   - action: keep
   static_configs:
   - targets: ["s"]
-`)
-
-    // Missing source_labels for action=drop in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Missing source_labels for action=drop in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
   - action: drop
   static_configs:
   - targets: ["s"]
-`)
-
-    // Missing source_labels for action=hashmod in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Missing source_labels for action=hashmod in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
@@ -668,10 +688,11 @@ scrape_configs:
     modulus: 123
   static_configs:
   - targets: ["s"]
-`)
-
-    // Missing target for action=hashmod in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Missing target for action=hashmod in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
@@ -680,10 +701,11 @@ scrape_configs:
     modulus: 123
   static_configs:
   - targets: ["s"]
-`)
-
-    // Missing modulus for action=hashmod in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Missing modulus for action=hashmod in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
@@ -692,25 +714,42 @@ scrape_configs:
     target_label: bar
   static_configs:
   - targets: ["s"]
-`)
-
-    // Invalid action in relabel_configs
-    f(`
+`,
+        },
+        {
+            name: "Invalid action in relabel_configs",
+            data: `
 scrape_configs:
 - job_name: aa
   relabel_configs:
   - action: foobar
   static_configs:
   - targets: ["s"]
-`)
-
-    // Invalid scrape_config_files contents
-    f(`
+`,
+        },
+        {
+            name: "Invalid scrape_config_files contents",
+            data: `
 scrape_config_files:
 - job_name: aa
   static_configs:
   - targets: ["s"]
-`)
+`,
+            wantErr: true,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            sws, err := getStaticScrapeWork([]byte(tt.data), "non-existing-file")
+            if (err != nil) != tt.wantErr {
+                t.Fatalf("failed to test %s, wantErr %v, got %v", tt.name, tt.wantErr, err)
+                return
+            }
+            if len(sws) != len(tt.expectSws) {
+                t.Errorf("failed to test %s, want %d sws, got %d", tt.name, len(tt.expectSws), len(sws))
+            }
+        })
+    }
 }
 
 func resetNonEssentialFields(sws []*ScrapeWork) {
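The refactor above converts an assert-failure helper into Go's standard table-driven layout, which lets individual cases assert success as well as failure; `wantErr` is now false for configs that are merely dropped with an error log. A minimal standalone sketch of the same layout against a hypothetical `parsePort` function (not part of VictoriaMetrics):

```go
// In a *_test.go file; run with `go test`.
package example

import (
	"fmt"
	"strconv"
	"testing"
)

// parsePort is a hypothetical function under test.
func parsePort(s string) (int, error) {
	p, err := strconv.Atoi(s)
	if err != nil || p < 1 || p > 65535 {
		return 0, fmt.Errorf("invalid port %q", s)
	}
	return p, nil
}

// TestParsePort shows the table-driven layout used by the refactored test:
// each case carries a name, an input, and a wantErr flag, and runs as its
// own subtest so failures are reported per case.
func TestParsePort(t *testing.T) {
	tests := []struct {
		name    string
		in      string
		wantErr bool
	}{
		{name: "valid", in: "8080"},
		{name: "not a number", in: "foo", wantErr: true},
		{name: "out of range", in: "70000", wantErr: true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, err := parsePort(tt.in)
			if (err != nil) != tt.wantErr {
				t.Fatalf("parsePort(%q): wantErr=%v, got err=%v", tt.in, tt.wantErr, err)
			}
		})
	}
}
```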