app/vmalert: switch from table-driven tests to f-tests

This makes the test code clearer and reduces the number of code lines by roughly 500.
This also simplifies debugging tests. See https://itnext.io/f-tests-as-a-replacement-for-table-driven-tests-in-go-8814a8b19e9e

While at it, consistently use t.Fatal* instead of t.Error* across tests, since t.Error*
requires more boilerplate code, which can result in additional bugs inside tests.
While t.Error* allows logging multiple errors for the same test run, this rarely
simplifies fixing broken tests in practice.
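
For illustration, a minimal sketch of the f-test pattern used throughout this commit. It tests strconv.Atoi as a stand-in for the repository's own parsers and is not code from this commit; each case is a single call to a local f helper, and t.Helper() makes failures point at the exact call site:

package example

import (
	"strconv"
	"testing"
)

func TestAtoi_Success(t *testing.T) {
	// f runs one test case; every case below is a single plain call to f.
	f := func(input string, resultExpected int) {
		t.Helper() // failures are reported at the f(...) call site
		result, err := strconv.Atoi(input)
		if err != nil {
			t.Fatalf("unexpected error: %s", err)
		}
		if result != resultExpected {
			t.Fatalf("unexpected result; got %d; want %d", result, resultExpected)
		}
	}

	f("1", 1)
	f("-42", -42)
}

Failure cases go into a separate *_Failure test function with its own f helper, which is the split applied to the tests below.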

This is a follow-up for a9525da8a4
Aliaksandr Valialkin 2024-07-12 21:57:56 +02:00
parent cedbbdec30
commit 0078399788
24 changed files with 2659 additions and 3138 deletions

@@ -6,88 +6,61 @@ import (
"github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal"
)
func TestParseInputValue(t *testing.T) {
testCases := []struct {
input string
exp []sequenceValue
failed bool
}{
{
"",
nil,
true,
},
{
"testfailed",
nil,
true,
},
// stale doesn't support operations
{
"stalex3",
nil,
true,
},
{
"-4",
[]sequenceValue{{Value: -4}},
false,
},
{
"_",
[]sequenceValue{{Omitted: true}},
false,
},
{
"stale",
[]sequenceValue{{Value: decimal.StaleNaN}},
false,
},
{
"-4x1",
[]sequenceValue{{Value: -4}, {Value: -4}},
false,
},
{
"_x1",
[]sequenceValue{{Omitted: true}},
false,
},
{
"1+1x2 0.1 0.1+0.3x2 3.14",
[]sequenceValue{{Value: 1}, {Value: 2}, {Value: 3}, {Value: 0.1}, {Value: 0.1}, {Value: 0.4}, {Value: 0.7}, {Value: 3.14}},
false,
},
{
"2-1x4",
[]sequenceValue{{Value: 2}, {Value: 1}, {Value: 0}, {Value: -1}, {Value: -2}},
false,
},
{
"1+1x1 _ -4 stale 3+20x1",
[]sequenceValue{{Value: 1}, {Value: 2}, {Omitted: true}, {Value: -4}, {Value: decimal.StaleNaN}, {Value: 3}, {Value: 23}},
false,
},
func TestParseInputValue_Failure(t *testing.T) {
f := func(input string) {
t.Helper()
_, err := parseInputValue(input, true)
if err == nil {
t.Fatalf("expecting non-nil error")
}
}
for _, tc := range testCases {
output, err := parseInputValue(tc.input, true)
if err != nil != tc.failed {
t.Fatalf("failed to parse %s, expect %t, got %t", tc.input, tc.failed, err != nil)
f("")
f("testfailed")
// stale doesn't support operations
f("stalex3")
}
func TestParseInputValue_Success(t *testing.T) {
f := func(input string, outputExpected []sequenceValue) {
t.Helper()
output, err := parseInputValue(input, true)
if err != nil {
t.Fatalf("unexpected error in parseInputValue: %s", err)
}
if len(tc.exp) != len(output) {
t.Fatalf("expect %v, got %v", tc.exp, output)
if len(outputExpected) != len(output) {
t.Fatalf("unexpected output length; got %d; want %d", len(outputExpected), len(output))
}
for i := 0; i < len(tc.exp); i++ {
if tc.exp[i].Omitted != output[i].Omitted {
t.Fatalf("expect %v, got %v", tc.exp, output)
for i := 0; i < len(outputExpected); i++ {
if outputExpected[i].Omitted != output[i].Omitted {
t.Fatalf("unexpected Omitted field in the output\ngot\n%v\nwant\n%v", output, outputExpected)
}
if tc.exp[i].Value != output[i].Value {
if decimal.IsStaleNaN(tc.exp[i].Value) && decimal.IsStaleNaN(output[i].Value) {
if outputExpected[i].Value != output[i].Value {
if decimal.IsStaleNaN(outputExpected[i].Value) && decimal.IsStaleNaN(output[i].Value) {
continue
}
t.Fatalf("expect %v, got %v", tc.exp, output)
t.Fatalf("unexpected Value field in the output\ngot\n%v\nwant\n%v", output, outputExpected)
}
}
}
f("-4", []sequenceValue{{Value: -4}})
f("_", []sequenceValue{{Omitted: true}})
f("stale", []sequenceValue{{Value: decimal.StaleNaN}})
f("-4x1", []sequenceValue{{Value: -4}, {Value: -4}})
f("_x1", []sequenceValue{{Omitted: true}})
f("1+1x2 0.1 0.1+0.3x2 3.14", []sequenceValue{{Value: 1}, {Value: 2}, {Value: 3}, {Value: 0.1}, {Value: 0.1}, {Value: 0.4}, {Value: 0.7}, {Value: 3.14}})
f("2-1x4", []sequenceValue{{Value: 2}, {Value: 1}, {Value: 0}, {Value: -1}, {Value: -2}})
f("1+1x1 _ -4 stale 3+20x1", []sequenceValue{{Value: 1}, {Value: 2}, {Omitted: true}, {Value: -4}, {Value: decimal.StaleNaN}, {Value: 3}, {Value: 23}})
}

@@ -14,34 +14,33 @@ func TestMain(m *testing.M) {
os.Exit(m.Run())
}
func TestUnitRule(t *testing.T) {
testCases := []struct {
name string
disableGroupLabel bool
files []string
failed bool
}{
{
name: "run multi files",
files: []string{"./testdata/test1.yaml", "./testdata/test2.yaml"},
failed: false,
},
{
name: "disable group label",
disableGroupLabel: true,
files: []string{"./testdata/disable-group-label.yaml"},
failed: false,
},
{
name: "failing test",
files: []string{"./testdata/failed-test.yaml"},
failed: true,
},
}
for _, tc := range testCases {
fail := UnitTest(tc.files, tc.disableGroupLabel)
if fail != tc.failed {
t.Fatalf("failed to test %s, expect %t, got %t", tc.name, tc.failed, fail)
func TestUnitTest_Failure(t *testing.T) {
f := func(files []string) {
t.Helper()
failed := UnitTest(files, false)
if !failed {
t.Fatalf("expecting failed test")
}
}
// failing test
f([]string{"./testdata/failed-test.yaml"})
}
func TestUnitTest_Success(t *testing.T) {
f := func(disableGroupLabel bool, files []string) {
t.Helper()
failed := UnitTest(files, disableGroupLabel)
if failed {
t.Fatalf("unexpected failed test")
}
}
// run multi files
f(false, []string{"./testdata/test1.yaml", "./testdata/test2.yaml"})
// disable group label
f(true, []string{"./testdata/disable-group-label.yaml"})
}

@@ -23,12 +23,6 @@ func TestMain(m *testing.M) {
os.Exit(m.Run())
}
func TestParseGood(t *testing.T) {
if _, err := Parse([]string{"testdata/rules/*good.rules", "testdata/dir/*good.*"}, notifier.ValidateTemplates, true); err != nil {
t.Errorf("error parsing files %s", err)
}
}
func TestParseFromURL(t *testing.T) {
mux := http.NewServeMux()
mux.HandleFunc("/bad", func(w http.ResponseWriter, _ *http.Request) {
@@ -55,127 +49,187 @@ groups:
defer srv.Close()
if _, err := Parse([]string{srv.URL + "/good-alert", srv.URL + "/good-rr"}, notifier.ValidateTemplates, true); err != nil {
t.Errorf("error parsing URLs %s", err)
t.Fatalf("error parsing URLs %s", err)
}
if _, err := Parse([]string{srv.URL + "/bad"}, notifier.ValidateTemplates, true); err == nil {
t.Errorf("expected parsing error: %s", err)
t.Fatalf("expected parsing error: %s", err)
}
}
func TestParseBad(t *testing.T) {
testCases := []struct {
path []string
expErr string
}{
{
[]string{"testdata/rules/rules_interval_bad.rules"},
"eval_offset should be smaller than interval",
},
{
[]string{"testdata/rules/rules0-bad.rules"},
"unexpected token",
},
{
[]string{"testdata/dir/rules0-bad.rules"},
"error parsing annotation",
},
{
[]string{"testdata/dir/rules1-bad.rules"},
"duplicate in file",
},
{
[]string{"testdata/dir/rules2-bad.rules"},
"function \"unknown\" not defined",
},
{
[]string{"testdata/dir/rules3-bad.rules"},
"either `record` or `alert` must be set",
},
{
[]string{"testdata/dir/rules4-bad.rules"},
"either `record` or `alert` must be set",
},
{
[]string{"testdata/rules/rules1-bad.rules"},
"bad graphite expr",
},
{
[]string{"testdata/dir/rules6-bad.rules"},
"missing ':' in header",
},
{
[]string{"http://unreachable-url"},
"failed to",
},
func TestParse_Success(t *testing.T) {
_, err := Parse([]string{"testdata/rules/*good.rules", "testdata/dir/*good.*"}, notifier.ValidateTemplates, true)
if err != nil {
t.Fatalf("error parsing files %s", err)
}
for _, tc := range testCases {
_, err := Parse(tc.path, notifier.ValidateTemplates, true)
}
func TestParse_Failure(t *testing.T) {
f := func(paths []string, errStrExpected string) {
t.Helper()
_, err := Parse(paths, notifier.ValidateTemplates, true)
if err == nil {
t.Errorf("expected to get error")
return
t.Fatalf("expected to get error")
}
if !strings.Contains(err.Error(), tc.expErr) {
t.Errorf("expected err to contain %q; got %q instead", tc.expErr, err)
if !strings.Contains(err.Error(), errStrExpected) {
t.Fatalf("expected err to contain %q; got %q instead", errStrExpected, err)
}
}
f([]string{"testdata/rules/rules_interval_bad.rules"}, "eval_offset should be smaller than interval")
f([]string{"testdata/rules/rules0-bad.rules"}, "unexpected token")
f([]string{"testdata/dir/rules0-bad.rules"}, "error parsing annotation")
f([]string{"testdata/dir/rules1-bad.rules"}, "duplicate in file")
f([]string{"testdata/dir/rules2-bad.rules"}, "function \"unknown\" not defined")
f([]string{"testdata/dir/rules3-bad.rules"}, "either `record` or `alert` must be set")
f([]string{"testdata/dir/rules4-bad.rules"}, "either `record` or `alert` must be set")
f([]string{"testdata/rules/rules1-bad.rules"}, "bad graphite expr")
f([]string{"testdata/dir/rules6-bad.rules"}, "missing ':' in header")
f([]string{"http://unreachable-url"}, "failed to")
}
func TestRule_Validate(t *testing.T) {
func TestRuleValidate(t *testing.T) {
if err := (&Rule{}).Validate(); err == nil {
t.Errorf("expected empty name error")
t.Fatalf("expected empty name error")
}
if err := (&Rule{Alert: "alert"}).Validate(); err == nil {
t.Errorf("expected empty expr error")
t.Fatalf("expected empty expr error")
}
if err := (&Rule{Alert: "alert", Expr: "test>0"}).Validate(); err != nil {
t.Errorf("expected valid rule; got %s", err)
t.Fatalf("expected valid rule; got %s", err)
}
}
func TestGroup_Validate(t *testing.T) {
testCases := []struct {
group *Group
rules []Rule
validateAnnotations bool
validateExpressions bool
expErr string
}{
{
group: &Group{},
expErr: "group name must be set",
},
{
group: &Group{
func TestGroupValidate_Failure(t *testing.T) {
f := func(group *Group, validateExpressions bool, errStrExpected string) {
t.Helper()
err := group.Validate(nil, validateExpressions)
if err == nil {
t.Fatalf("expecting non-nil error")
}
errStr := err.Error()
if !strings.Contains(errStr, errStrExpected) {
t.Fatalf("missing %q in the returned error %q", errStrExpected, errStr)
}
}
f(&Group{}, false, "group name must be set")
f(&Group{
Name: "negative interval",
Interval: promutils.NewDuration(-1),
},
expErr: "interval shouldn't be lower than 0",
},
{
group: &Group{
}, false, "interval shouldn't be lower than 0")
f(&Group{
Name: "wrong eval_offset",
Interval: promutils.NewDuration(time.Minute),
EvalOffset: promutils.NewDuration(2 * time.Minute),
},
expErr: "eval_offset should be smaller than interval",
},
{
group: &Group{
}, false, "eval_offset should be smaller than interval")
f(&Group{
Name: "wrong limit",
Limit: -1,
},
expErr: "invalid limit",
},
{
group: &Group{
}, false, "invalid limit")
f(&Group{
Name: "wrong concurrency",
Concurrency: -1,
},
expErr: "invalid concurrency",
}, false, "invalid concurrency")
f(&Group{
Name: "test",
Rules: []Rule{
{
Alert: "alert",
Expr: "up == 1",
},
{
group: &Group{
Alert: "alert",
Expr: "up == 1",
},
},
}, false, "duplicate")
f(&Group{
Name: "test",
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
},
}, false, "duplicate")
f(&Group{
Name: "test",
Rules: []Rule{
{Record: "record", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Record: "record", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
},
}, false, "duplicate")
f(&Group{
Name: "test",
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"description": "{{ value|query }}",
}},
},
}, false, "duplicate")
f(&Group{
Name: "test",
Rules: []Rule{
{Record: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
},
}, false, "duplicate")
f(&Group{
Name: "test graphite prometheus bad expr",
Type: NewGraphiteType(),
Rules: []Rule{
{
Expr: "sum(up == 0 ) by (host)",
For: promutils.NewDuration(10 * time.Millisecond),
},
{
Expr: "sumSeries(time('foo.bar',10))",
},
},
}, false, "invalid rule")
f(&Group{
Name: "test graphite inherit",
Type: NewGraphiteType(),
Rules: []Rule{
{
Expr: "sumSeries(time('foo.bar',10))",
For: promutils.NewDuration(10 * time.Millisecond),
},
{
Expr: "sum(up == 0 ) by (host)",
},
},
}, false, "either `record` or `alert` must be set")
// validate expressions
f(&Group{
Name: "test",
Rules: []Rule{
{
@@ -183,11 +237,44 @@ func TestGroup_Validate(t *testing.T) {
Expr: "up | 0",
},
},
}, true, "invalid expression")
f(&Group{
Name: "test thanos",
Type: NewRawType("thanos"),
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"description": "{{ value|query }}",
}},
},
expErr: "",
}, true, "unknown datasource type")
f(&Group{
Name: "test graphite",
Type: NewGraphiteType(),
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"description": "some-description",
}},
},
{
group: &Group{
}, true, "bad graphite expr")
}
func TestGroupValidate_Success(t *testing.T) {
f := func(group *Group, validateAnnotations, validateExpressions bool) {
t.Helper()
var validateTplFn ValidateTplFn
if validateAnnotations {
validateTplFn = notifier.ValidateTemplates
}
err := group.Validate(validateTplFn, validateExpressions)
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
}
f(&Group{
Name: "test",
Rules: []Rule{
{
@@ -195,12 +282,9 @@ func TestGroup_Validate(t *testing.T) {
Expr: "up | 0",
},
},
},
expErr: "invalid expression",
validateExpressions: true,
},
{
group: &Group{
}, false, false)
f(&Group{
Name: "test",
Rules: []Rule{
{
@@ -211,11 +295,10 @@ func TestGroup_Validate(t *testing.T) {
},
},
},
},
expErr: "",
},
{
group: &Group{
}, false, false)
// validate annotations
f(&Group{
Name: "test",
Rules: []Rule{
{
@@ -229,109 +312,10 @@ func TestGroup_Validate(t *testing.T) {
},
},
},
},
validateAnnotations: true,
},
{
group: &Group{
Name: "test",
Rules: []Rule{
{
Alert: "alert",
Expr: "up == 1",
},
{
Alert: "alert",
Expr: "up == 1",
},
},
},
expErr: "duplicate",
},
{
group: &Group{
Name: "test",
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
},
},
expErr: "duplicate",
},
{
group: &Group{
Name: "test",
Rules: []Rule{
{Record: "record", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Record: "record", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
},
},
expErr: "duplicate",
},
{
group: &Group{
Name: "test",
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"description": "{{ value|query }}",
}},
},
},
expErr: "",
},
{
group: &Group{
Name: "test",
Rules: []Rule{
{Record: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"summary": "{{ value|query }}",
}},
},
},
expErr: "",
},
{
group: &Group{
Name: "test thanos",
Type: NewRawType("thanos"),
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"description": "{{ value|query }}",
}},
},
},
validateExpressions: true,
expErr: "unknown datasource type",
},
{
group: &Group{
Name: "test graphite",
Type: NewGraphiteType(),
Rules: []Rule{
{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"description": "some-description",
}},
},
},
validateExpressions: true,
expErr: "",
},
{
group: &Group{
}, true, false)
// validate expressions
f(&Group{
Name: "test prometheus",
Type: NewPrometheusType(),
Rules: []Rule{
@@ -339,154 +323,79 @@ func TestGroup_Validate(t *testing.T) {
"description": "{{ value|query }}",
}},
},
},
validateExpressions: true,
expErr: "",
},
{
group: &Group{
Name: "test graphite inherit",
Type: NewGraphiteType(),
Rules: []Rule{
{
Expr: "sumSeries(time('foo.bar',10))",
For: promutils.NewDuration(10 * time.Millisecond),
},
{
Expr: "sum(up == 0 ) by (host)",
},
},
},
},
{
group: &Group{
Name: "test graphite prometheus bad expr",
Type: NewGraphiteType(),
Rules: []Rule{
{
Expr: "sum(up == 0 ) by (host)",
For: promutils.NewDuration(10 * time.Millisecond),
},
{
Expr: "sumSeries(time('foo.bar',10))",
},
},
},
expErr: "invalid rule",
},
}
for _, tc := range testCases {
var validateTplFn ValidateTplFn
if tc.validateAnnotations {
validateTplFn = notifier.ValidateTemplates
}
err := tc.group.Validate(validateTplFn, tc.validateExpressions)
if err == nil {
if tc.expErr != "" {
t.Errorf("expected to get err %q; got nil insted", tc.expErr)
}
continue
}
if !strings.Contains(err.Error(), tc.expErr) {
t.Errorf("expected err to contain %q; got %q instead", tc.expErr, err)
}
}
}, false, true)
}
func TestHashRule(t *testing.T) {
testCases := []struct {
a, b Rule
equal bool
}{
{
Rule{Record: "record", Expr: "up == 1"},
Rule{Record: "record", Expr: "up == 1"},
true,
},
{
Rule{Alert: "alert", Expr: "up == 1"},
Rule{Alert: "alert", Expr: "up == 1"},
true,
},
{
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
func TestHashRule_NotEqual(t *testing.T) {
f := func(a, b Rule) {
t.Helper()
aID, bID := HashRule(a), HashRule(b)
if aID == bID {
t.Fatalf("rule hashes mustn't be equal; got %d", aID)
}
}
f(Rule{Alert: "record", Expr: "up == 1"}, Rule{Record: "record", Expr: "up == 1"})
f(Rule{Record: "record", Expr: "up == 1"}, Rule{Record: "record", Expr: "up == 2"})
f(Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}},
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}},
true,
},
{
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}},
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"baz": "foo",
"foo": "bar",
}},
true,
},
{
Rule{Alert: "record", Expr: "up == 1"},
Rule{Alert: "record", Expr: "up == 1"},
true,
},
{
Rule{Alert: "alert", Expr: "up == 1", For: promutils.NewDuration(time.Minute), KeepFiringFor: promutils.NewDuration(time.Minute)},
Rule{Alert: "alert", Expr: "up == 1"},
true,
},
{
Rule{Alert: "record", Expr: "up == 1"},
Rule{Record: "record", Expr: "up == 1"},
false,
},
{
Rule{Record: "record", Expr: "up == 1"},
Rule{Record: "record", Expr: "up == 2"},
false,
},
{
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}},
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
}}, Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"baz": "foo",
"foo": "baz",
}},
false,
},
{
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
}})
f(Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}},
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
}}, Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"baz": "foo",
}},
false,
},
{
Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
}})
f(Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}},
Rule{Alert: "alert", Expr: "up == 1"},
false,
},
}
for i, tc := range testCases {
aID, bID := HashRule(tc.a), HashRule(tc.b)
if tc.equal != (aID == bID) {
t.Fatalf("mismatch for rule %d", i)
}}, Rule{Alert: "alert", Expr: "up == 1"})
}
func TestHashRule_Equal(t *testing.T) {
f := func(a, b Rule) {
t.Helper()
aID, bID := HashRule(a), HashRule(b)
if aID != bID {
t.Fatalf("rule hashes must be equal; got %d and %d", aID, bID)
}
}
f(Rule{Record: "record", Expr: "up == 1"}, Rule{Record: "record", Expr: "up == 1"})
f(Rule{Alert: "alert", Expr: "up == 1"}, Rule{Alert: "alert", Expr: "up == 1"})
f(Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}}, Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}})
f(Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"foo": "bar",
"baz": "foo",
}}, Rule{Alert: "alert", Expr: "up == 1", Labels: map[string]string{
"baz": "foo",
"foo": "bar",
}})
f(Rule{Alert: "record", Expr: "up == 1"}, Rule{Alert: "record", Expr: "up == 1"})
f(Rule{
Alert: "alert", Expr: "up == 1", For: promutils.NewDuration(time.Minute), KeepFiringFor: promutils.NewDuration(time.Minute),
}, Rule{Alert: "alert", Expr: "up == 1"})
}
func TestGroupChecksum(t *testing.T) {

@@ -18,14 +18,14 @@ func TestOutput(t *testing.T) {
mustMatch := func(exp string) {
t.Helper()
if exp == "" {
if testOutput.String() != "" {
t.Errorf("expected output to be empty; got %q", testOutput.String())
return
t.Fatalf("expected output to be empty; got %q", testOutput.String())
}
}
if !strings.Contains(testOutput.String(), exp) {
t.Errorf("output %q should contain %q", testOutput.String(), exp)
t.Fatalf("output %q should contain %q", testOutput.String(), exp)
}
fmt.Println(testOutput.String())
testOutput.Reset()

@@ -31,26 +31,26 @@ var (
func TestVMInstantQuery(t *testing.T) {
mux := http.NewServeMux()
mux.HandleFunc("/", func(_ http.ResponseWriter, _ *http.Request) {
t.Errorf("should not be called")
t.Fatalf("should not be called")
})
c := -1
mux.HandleFunc("/api/v1/query", func(w http.ResponseWriter, r *http.Request) {
c++
if r.Method != http.MethodPost {
t.Errorf("expected POST method got %s", r.Method)
t.Fatalf("expected POST method got %s", r.Method)
}
if name, pass, _ := r.BasicAuth(); name != basicAuthName || pass != basicAuthPass {
t.Errorf("expected %s:%s as basic auth got %s:%s", basicAuthName, basicAuthPass, name, pass)
t.Fatalf("expected %s:%s as basic auth got %s:%s", basicAuthName, basicAuthPass, name, pass)
}
if r.URL.Query().Get("query") != query {
t.Errorf("expected %s in query param, got %s", query, r.URL.Query().Get("query"))
t.Fatalf("expected %s in query param, got %s", query, r.URL.Query().Get("query"))
}
timeParam := r.URL.Query().Get("time")
if timeParam == "" {
t.Errorf("expected 'time' in query param, got nil instead")
t.Fatalf("expected 'time' in query param, got nil instead")
}
if _, err := time.Parse(time.RFC3339, timeParam); err != nil {
t.Errorf("failed to parse 'time' query param %q: %s", timeParam, err)
t.Fatalf("failed to parse 'time' query param %q: %s", timeParam, err)
}
switch c {
case 0:
@@ -197,13 +197,13 @@ func TestVMInstantQuery(t *testing.T) {
func TestVMInstantQueryWithRetry(t *testing.T) {
mux := http.NewServeMux()
mux.HandleFunc("/", func(_ http.ResponseWriter, _ *http.Request) {
t.Errorf("should not be called")
t.Fatalf("should not be called")
})
c := -1
mux.HandleFunc("/api/v1/query", func(w http.ResponseWriter, r *http.Request) {
c++
if r.URL.Query().Get("query") != query {
t.Errorf("expected %s in query param, got %s", query, r.URL.Query().Get("query"))
t.Fatalf("expected %s in query param, got %s", query, r.URL.Query().Get("query"))
}
switch c {
case 0:
@@ -289,37 +289,37 @@ func metricsEqual(t *testing.T, gotM, expectedM []Metric) {
func TestVMRangeQuery(t *testing.T) {
mux := http.NewServeMux()
mux.HandleFunc("/", func(_ http.ResponseWriter, _ *http.Request) {
t.Errorf("should not be called")
t.Fatalf("should not be called")
})
c := -1
mux.HandleFunc("/api/v1/query_range", func(w http.ResponseWriter, r *http.Request) {
c++
if r.Method != http.MethodPost {
t.Errorf("expected POST method got %s", r.Method)
t.Fatalf("expected POST method got %s", r.Method)
}
if name, pass, _ := r.BasicAuth(); name != basicAuthName || pass != basicAuthPass {
t.Errorf("expected %s:%s as basic auth got %s:%s", basicAuthName, basicAuthPass, name, pass)
t.Fatalf("expected %s:%s as basic auth got %s:%s", basicAuthName, basicAuthPass, name, pass)
}
if r.URL.Query().Get("query") != query {
t.Errorf("expected %s in query param, got %s", query, r.URL.Query().Get("query"))
t.Fatalf("expected %s in query param, got %s", query, r.URL.Query().Get("query"))
}
startTS := r.URL.Query().Get("start")
if startTS == "" {
t.Errorf("expected 'start' in query param, got nil instead")
t.Fatalf("expected 'start' in query param, got nil instead")
}
if _, err := time.Parse(time.RFC3339, startTS); err != nil {
t.Errorf("failed to parse 'start' query param: %s", err)
t.Fatalf("failed to parse 'start' query param: %s", err)
}
endTS := r.URL.Query().Get("end")
if endTS == "" {
t.Errorf("expected 'end' in query param, got nil instead")
t.Fatalf("expected 'end' in query param, got nil instead")
}
if _, err := time.Parse(time.RFC3339, endTS); err != nil {
t.Errorf("failed to parse 'end' query param: %s", err)
t.Fatalf("failed to parse 'end' query param: %s", err)
}
step := r.URL.Query().Get("step")
if step != "15s" {
t.Errorf("expected 'step' query param to be 15s; got %q instead", step)
t.Fatalf("expected 'step' query param to be 15s; got %q instead", step)
}
switch c {
case 0:
@@ -370,183 +370,160 @@ func TestVMRangeQuery(t *testing.T) {
}
func TestRequestParams(t *testing.T) {
authCfg, err := baCfg.NewConfig(".")
if err != nil {
t.Fatalf("unexpected: %s", err)
}
query := "up"
timestamp := time.Date(2001, 2, 3, 4, 5, 6, 0, time.UTC)
f := func(isQueryRange bool, vm *VMStorage, checkFn func(t *testing.T, r *http.Request)) {
t.Helper()
req, err := vm.newRequest(ctx)
if err != nil {
t.Fatalf("error in newRequest: %s", err)
}
switch vm.dataSourceType {
case "", datasourcePrometheus:
if isQueryRange {
vm.setPrometheusRangeReqParams(req, query, timestamp, timestamp)
} else {
vm.setPrometheusInstantReqParams(req, query, timestamp)
}
case datasourceGraphite:
vm.setGraphiteReqParams(req, query)
}
checkFn(t, req)
}
authCfg, err := baCfg.NewConfig(".")
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
storage := VMStorage{
extraParams: url.Values{"round_digits": {"10"}},
}
testCases := []struct {
name string
queryRange bool
vm *VMStorage
checkFn func(t *testing.T, r *http.Request)
}{
{
"prometheus path",
false,
&VMStorage{
// prometheus path
f(false, &VMStorage{
dataSourceType: datasourcePrometheus,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
checkEqualString(t, "/api/v1/query", r.URL.Path)
},
},
{
"prometheus prefix",
false,
&VMStorage{
})
// prometheus prefix
f(false, &VMStorage{
dataSourceType: datasourcePrometheus,
appendTypePrefix: true,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
checkEqualString(t, "/prometheus/api/v1/query", r.URL.Path)
},
},
{
"prometheus range path",
true,
&VMStorage{
})
// prometheus range path
f(true, &VMStorage{
dataSourceType: datasourcePrometheus,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
checkEqualString(t, "/api/v1/query_range", r.URL.Path)
},
},
{
"prometheus range prefix",
true,
&VMStorage{
})
// prometheus range prefix
f(true, &VMStorage{
dataSourceType: datasourcePrometheus,
appendTypePrefix: true,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
checkEqualString(t, "/prometheus/api/v1/query_range", r.URL.Path)
},
},
{
"graphite path",
false,
&VMStorage{
})
// graphite path
f(false, &VMStorage{
dataSourceType: datasourceGraphite,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
checkEqualString(t, graphitePath, r.URL.Path)
},
},
{
"graphite prefix",
false,
&VMStorage{
})
// graphite prefix
f(false, &VMStorage{
dataSourceType: datasourceGraphite,
appendTypePrefix: true,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
checkEqualString(t, graphitePrefix+graphitePath, r.URL.Path)
},
},
{
"default params",
false,
&VMStorage{},
func(t *testing.T, r *http.Request) {
})
// default params
f(false, &VMStorage{}, func(t *testing.T, r *http.Request) {
exp := url.Values{"query": {query}, "time": {timestamp.Format(time.RFC3339)}}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"default range params",
true,
&VMStorage{},
func(t *testing.T, r *http.Request) {
})
// default range params
f(true, &VMStorage{}, func(t *testing.T, r *http.Request) {
ts := timestamp.Format(time.RFC3339)
exp := url.Values{"query": {query}, "start": {ts}, "end": {ts}}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"basic auth",
false,
&VMStorage{authCfg: authCfg},
func(t *testing.T, r *http.Request) {
})
// basic auth
f(false, &VMStorage{
authCfg: authCfg,
}, func(t *testing.T, r *http.Request) {
u, p, _ := r.BasicAuth()
checkEqualString(t, "foo", u)
checkEqualString(t, "bar", p)
},
},
{
"basic auth range",
true,
&VMStorage{authCfg: authCfg},
func(t *testing.T, r *http.Request) {
})
// basic auth range
f(true, &VMStorage{
authCfg: authCfg,
}, func(t *testing.T, r *http.Request) {
u, p, _ := r.BasicAuth()
checkEqualString(t, "foo", u)
checkEqualString(t, "bar", p)
},
},
{
"evaluation interval",
false,
&VMStorage{
})
// evaluation interval
f(false, &VMStorage{
evaluationInterval: 15 * time.Second,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
evalInterval := 15 * time.Second
exp := url.Values{"query": {query}, "step": {evalInterval.String()}, "time": {timestamp.Format(time.RFC3339)}}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"step override",
false,
&VMStorage{
})
// step override
f(false, &VMStorage{
queryStep: time.Minute,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
exp := url.Values{
"query": {query},
"step": {fmt.Sprintf("%ds", int(time.Minute.Seconds()))},
"time": {timestamp.Format(time.RFC3339)},
}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"step to seconds",
false,
&VMStorage{
})
// step to seconds
f(false, &VMStorage{
evaluationInterval: 3 * time.Hour,
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
evalInterval := 3 * time.Hour
exp := url.Values{"query": {query}, "step": {fmt.Sprintf("%ds", int(evalInterval.Seconds()))}, "time": {timestamp.Format(time.RFC3339)}}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"prometheus extra params",
false,
&VMStorage{
})
// prometheus extra params
f(false, &VMStorage{
extraParams: url.Values{"round_digits": {"10"}},
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
exp := url.Values{"query": {query}, "round_digits": {"10"}, "time": {timestamp.Format(time.RFC3339)}}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"prometheus extra params range",
true,
&VMStorage{
})
// prometheus extra params range
f(true, &VMStorage{
extraParams: url.Values{
"nocache": {"1"},
"max_lookback": {"1h"},
},
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
exp := url.Values{
"query": {query},
"end": {timestamp.Format(time.RFC3339)},
@@ -555,143 +532,108 @@ func TestRequestParams(t *testing.T) {
"max_lookback": {"1h"},
}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"custom params overrides the original params",
false,
storage.Clone().ApplyParams(QuerierParams{
})
// custom params overrides the original params
f(false, storage.Clone().ApplyParams(QuerierParams{
QueryParams: url.Values{"round_digits": {"2"}},
}),
func(t *testing.T, r *http.Request) {
}), func(t *testing.T, r *http.Request) {
exp := url.Values{"query": {query}, "round_digits": {"2"}, "time": {timestamp.Format(time.RFC3339)}}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"allow duplicates in query params",
false,
storage.Clone().ApplyParams(QuerierParams{
})
// allow duplicates in query params
f(false, storage.Clone().ApplyParams(QuerierParams{
QueryParams: url.Values{"extra_labels": {"env=dev", "foo=bar"}},
}),
func(t *testing.T, r *http.Request) {
}), func(t *testing.T, r *http.Request) {
exp := url.Values{"query": {query}, "round_digits": {"10"}, "extra_labels": {"env=dev", "foo=bar"}, "time": {timestamp.Format(time.RFC3339)}}
checkEqualString(t, exp.Encode(), r.URL.RawQuery)
},
},
{
"graphite extra params",
false,
&VMStorage{
})
// graphite extra params
f(false, &VMStorage{
dataSourceType: datasourceGraphite,
extraParams: url.Values{
"nocache": {"1"},
"max_lookback": {"1h"},
},
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
exp := fmt.Sprintf("format=json&from=-5min&max_lookback=1h&nocache=1&target=%s&until=now", query)
checkEqualString(t, exp, r.URL.RawQuery)
},
},
{
"graphite extra params allows to override from",
false,
&VMStorage{
})
// graphite extra params allows to override from
f(false, &VMStorage{
dataSourceType: datasourceGraphite,
extraParams: url.Values{
"from": {"-10m"},
},
},
func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
exp := fmt.Sprintf("format=json&from=-10m&target=%s&until=now", query)
checkEqualString(t, exp, r.URL.RawQuery)
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
req, err := tc.vm.newRequest(ctx)
if err != nil {
t.Fatal(err)
}
switch tc.vm.dataSourceType {
case "", datasourcePrometheus:
if tc.queryRange {
tc.vm.setPrometheusRangeReqParams(req, query, timestamp, timestamp)
} else {
tc.vm.setPrometheusInstantReqParams(req, query, timestamp)
}
case datasourceGraphite:
tc.vm.setGraphiteReqParams(req, query)
}
tc.checkFn(t, req)
})
}
}
func TestHeaders(t *testing.T) {
testCases := []struct {
name string
vmFn func() *VMStorage
checkFn func(t *testing.T, r *http.Request)
}{
{
name: "basic auth",
vmFn: func() *VMStorage {
f := func(vmFn func() *VMStorage, checkFn func(t *testing.T, r *http.Request)) {
t.Helper()
vm := vmFn()
req, err := vm.newQueryRequest(ctx, "foo", time.Now())
if err != nil {
t.Fatalf("error in newQueryRequest: %s", err)
}
checkFn(t, req)
}
// basic auth
f(func() *VMStorage {
cfg, err := utils.AuthConfig(utils.WithBasicAuth("foo", "bar", ""))
if err != nil {
t.Errorf("Error get auth config: %s", err)
t.Fatalf("Error get auth config: %s", err)
}
return &VMStorage{authCfg: cfg}
},
checkFn: func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
u, p, _ := r.BasicAuth()
checkEqualString(t, "foo", u)
checkEqualString(t, "bar", p)
},
},
{
name: "bearer auth",
vmFn: func() *VMStorage {
})
// bearer auth
f(func() *VMStorage {
cfg, err := utils.AuthConfig(utils.WithBearer("foo", ""))
if err != nil {
t.Errorf("Error get auth config: %s", err)
t.Fatalf("Error get auth config: %s", err)
}
return &VMStorage{authCfg: cfg}
},
checkFn: func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
reqToken := r.Header.Get("Authorization")
splitToken := strings.Split(reqToken, "Bearer ")
if len(splitToken) != 2 {
t.Errorf("expected two items got %d", len(splitToken))
t.Fatalf("expected two items got %d", len(splitToken))
}
token := splitToken[1]
checkEqualString(t, "foo", token)
},
},
{
name: "custom extraHeaders",
vmFn: func() *VMStorage {
})
// custom extraHeaders
f(func() *VMStorage {
return &VMStorage{extraHeaders: []keyValue{
{key: "Foo", value: "bar"},
{key: "Baz", value: "qux"},
}}
},
checkFn: func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
h1 := r.Header.Get("Foo")
checkEqualString(t, "bar", h1)
h2 := r.Header.Get("Baz")
checkEqualString(t, "qux", h2)
},
},
{
name: "custom header overrides basic auth",
vmFn: func() *VMStorage {
})
// custom header overrides basic auth
f(func() *VMStorage {
cfg, err := utils.AuthConfig(utils.WithBasicAuth("foo", "bar", ""))
if err != nil {
t.Errorf("Error get auth config: %s", err)
t.Fatalf("Error get auth config: %s", err)
}
return &VMStorage{
authCfg: cfg,
@@ -699,39 +641,28 @@ func TestHeaders(t *testing.T) {
{key: "Authorization", value: "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="},
},
}
},
checkFn: func(t *testing.T, r *http.Request) {
}, func(t *testing.T, r *http.Request) {
u, p, _ := r.BasicAuth()
checkEqualString(t, "Aladdin", u)
checkEqualString(t, "open sesame", p)
},
},
}
for _, tt := range testCases {
t.Run(tt.name, func(t *testing.T) {
vm := tt.vmFn()
req, err := vm.newQueryRequest(ctx, "foo", time.Now())
if err != nil {
t.Fatal(err)
}
tt.checkFn(t, req)
})
}
}
func checkEqualString(t *testing.T, exp, got string) {
t.Helper()
if got != exp {
t.Errorf("expected to get: \n%q; \ngot: \n%q", exp, got)
t.Fatalf("expected to get: \n%q; \ngot: \n%q", exp, got)
}
}
func expectError(t *testing.T, err error, exp string) {
t.Helper()
if err == nil {
t.Errorf("expected non-nil error")
t.Fatalf("expected non-nil error")
}
if !strings.Contains(err.Error(), exp) {
t.Errorf("expected error %q to contain %q", err, exp)
t.Fatalf("expected error %q to contain %q", err, exp)
}
}

@@ -25,26 +25,26 @@ func TestGetExternalURL(t *testing.T) {
invalidURL := "victoriametrics.com/path"
_, err := getExternalURL(invalidURL)
if err == nil {
t.Errorf("expected error, got nil")
t.Fatalf("expected error, got nil")
}
expURL := "https://victoriametrics.com/path"
u, err := getExternalURL(expURL)
if err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
if u.String() != expURL {
t.Errorf("unexpected url: want %q, got %s", expURL, u.String())
t.Fatalf("unexpected url: want %q, got %s", expURL, u.String())
}
h, _ := os.Hostname()
expURL = fmt.Sprintf("http://%s:8880", h)
u, err = getExternalURL("")
if err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
if u.String() != expURL {
t.Errorf("unexpected url: want %s, got %s", expURL, u.String())
t.Fatalf("unexpected url: want %s, got %s", expURL, u.String())
}
}
@@ -53,22 +53,22 @@ func TestGetAlertURLGenerator(t *testing.T) {
u, _ := url.Parse("https://victoriametrics.com/path")
fn, err := getAlertURLGenerator(u, "", false)
if err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
exp := fmt.Sprintf("https://victoriametrics.com/path/vmalert/alert?%s=42&%s=2", paramGroupID, paramAlertID)
if exp != fn(testAlert) {
t.Errorf("unexpected url want %s, got %s", exp, fn(testAlert))
t.Fatalf("unexpected url want %s, got %s", exp, fn(testAlert))
}
_, err = getAlertURLGenerator(nil, "foo?{{invalid}}", true)
if err == nil {
t.Errorf("expected template validation error got nil")
t.Fatalf("expected template validation error got nil")
}
fn, err = getAlertURLGenerator(u, "foo?query={{$value}}&ds={{ $labels.tenant }}", true)
if err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
if exp := "https://victoriametrics.com/path/foo?query=4&ds=baz"; exp != fn(testAlert) {
t.Errorf("unexpected url want %s, got %s", exp, fn(testAlert))
t.Fatalf("unexpected url want %s, got %s", exp, fn(testAlert))
}
}

@@ -82,9 +82,8 @@ func TestManagerUpdateConcurrent(t *testing.T) {
wg.Wait()
}
// TestManagerUpdate tests sequential configuration
// updates.
func TestManagerUpdate(t *testing.T) {
// TestManagerUpdate tests sequential configuration updates.
func TestManagerUpdate_Success(t *testing.T) {
const defaultEvalInterval = time.Second * 30
currentEvalInterval := *evaluationInterval
*evaluationInterval = defaultEvalInterval
@@ -120,17 +119,43 @@ func TestManagerUpdate(t *testing.T) {
}
)
testCases := []struct {
name string
initPath string
updatePath string
want []*rule.Group
}{
{
name: "update good rules",
initPath: "config/testdata/rules/rules0-good.rules",
updatePath: "config/testdata/dir/rules1-good.rules",
want: []*rule.Group{
f := func(initPath, updatePath string, groupsExpected []*rule.Group) {
t.Helper()
ctx, cancel := context.WithCancel(context.TODO())
m := &manager{
groups: make(map[uint64]*rule.Group),
querierBuilder: &datasource.FakeQuerier{},
notifiers: func() []notifier.Notifier { return []notifier.Notifier{&notifier.FakeNotifier{}} },
}
cfgInit := loadCfg(t, []string{initPath}, true, true)
if err := m.update(ctx, cfgInit, false); err != nil {
t.Fatalf("failed to complete initial rules update: %s", err)
}
cfgUpdate, err := config.Parse([]string{updatePath}, notifier.ValidateTemplates, true)
if err == nil { // update can fail and that's expected
_ = m.update(ctx, cfgUpdate, false)
}
if len(groupsExpected) != len(m.groups) {
t.Fatalf("unexpected number of groups; got %d; want %d", len(m.groups), len(groupsExpected))
}
for _, wantG := range groupsExpected {
gotG, ok := m.groups[wantG.ID()]
if !ok {
t.Fatalf("expected to have group %q", wantG.Name)
}
compareGroups(t, wantG, gotG)
}
cancel()
m.close()
}
// update good rules
f("config/testdata/rules/rules0-good.rules", "config/testdata/dir/rules1-good.rules", []*rule.Group{
{
File: "config/testdata/dir/rules1-good.rules",
Name: "duplicatedGroupDiffFiles",
@@ -149,116 +174,72 @@ func TestManagerUpdate(t *testing.T) {
},
},
},
},
},
{
name: "update good rules from 1 to 2 groups",
initPath: "config/testdata/dir/rules/rules1-good.rules",
updatePath: "config/testdata/rules/rules0-good.rules",
want: []*rule.Group{
{
File: "config/testdata/rules/rules0-good.rules",
Name: "groupGorSingleAlert",
Type: config.NewPrometheusType(),
Interval: defaultEvalInterval,
Rules: []rule.Rule{VMRows},
},
{
File: "config/testdata/rules/rules0-good.rules",
Interval: defaultEvalInterval,
Type: config.NewPrometheusType(),
Name: "TestGroup",
Rules: []rule.Rule{
Conns,
ExampleAlertAlwaysFiring,
},
},
},
},
{
name: "update with one bad rule file",
initPath: "config/testdata/rules/rules0-good.rules",
updatePath: "config/testdata/dir/rules2-bad.rules",
want: []*rule.Group{
{
File: "config/testdata/rules/rules0-good.rules",
Name: "groupGorSingleAlert",
Type: config.NewPrometheusType(),
Interval: defaultEvalInterval,
Rules: []rule.Rule{VMRows},
},
{
File: "config/testdata/rules/rules0-good.rules",
Interval: defaultEvalInterval,
Name: "TestGroup",
Type: config.NewPrometheusType(),
Rules: []rule.Rule{
Conns,
ExampleAlertAlwaysFiring,
},
},
},
},
{
name: "update empty dir rules from 0 to 2 groups",
initPath: "config/testdata/empty/*",
updatePath: "config/testdata/rules/rules0-good.rules",
want: []*rule.Group{
{
File: "config/testdata/rules/rules0-good.rules",
Name: "groupGorSingleAlert",
Type: config.NewPrometheusType(),
Interval: defaultEvalInterval,
Rules: []rule.Rule{VMRows},
},
{
File: "config/testdata/rules/rules0-good.rules",
Interval: defaultEvalInterval,
Type: config.NewPrometheusType(),
Name: "TestGroup",
Rules: []rule.Rule{
Conns,
ExampleAlertAlwaysFiring,
},
},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
ctx, cancel := context.WithCancel(context.TODO())
m := &manager{
groups: make(map[uint64]*rule.Group),
querierBuilder: &datasource.FakeQuerier{},
notifiers: func() []notifier.Notifier { return []notifier.Notifier{&notifier.FakeNotifier{}} },
}
cfgInit := loadCfg(t, []string{tc.initPath}, true, true)
if err := m.update(ctx, cfgInit, false); err != nil {
t.Fatalf("failed to complete initial rules update: %s", err)
}
cfgUpdate, err := config.Parse([]string{tc.updatePath}, notifier.ValidateTemplates, true)
if err == nil { // update can fail and that's expected
_ = m.update(ctx, cfgUpdate, false)
}
if len(tc.want) != len(m.groups) {
t.Fatalf("\nwant number of groups: %d;\ngot: %d ", len(tc.want), len(m.groups))
}
for _, wantG := range tc.want {
gotG, ok := m.groups[wantG.ID()]
if !ok {
t.Fatalf("expected to have group %q", wantG.Name)
}
compareGroups(t, wantG, gotG)
}
cancel()
m.close()
})
}
// update good rules from 1 to 2 groups
f("config/testdata/dir/rules/rules1-good.rules", "config/testdata/rules/rules0-good.rules", []*rule.Group{
{
File: "config/testdata/rules/rules0-good.rules",
Name: "groupGorSingleAlert",
Type: config.NewPrometheusType(),
Interval: defaultEvalInterval,
Rules: []rule.Rule{VMRows},
},
{
File: "config/testdata/rules/rules0-good.rules",
Interval: defaultEvalInterval,
Type: config.NewPrometheusType(),
Name: "TestGroup",
Rules: []rule.Rule{
Conns,
ExampleAlertAlwaysFiring,
},
},
})
// update with one bad rule file
f("config/testdata/rules/rules0-good.rules", "config/testdata/dir/rules2-bad.rules", []*rule.Group{
{
File: "config/testdata/rules/rules0-good.rules",
Name: "groupGorSingleAlert",
Type: config.NewPrometheusType(),
Interval: defaultEvalInterval,
Rules: []rule.Rule{VMRows},
},
{
File: "config/testdata/rules/rules0-good.rules",
Interval: defaultEvalInterval,
Name: "TestGroup",
Type: config.NewPrometheusType(),
Rules: []rule.Rule{
Conns,
ExampleAlertAlwaysFiring,
},
},
})
// update empty dir rules from 0 to 2 groups
f("config/testdata/empty/*", "config/testdata/rules/rules0-good.rules", []*rule.Group{
{
File: "config/testdata/rules/rules0-good.rules",
Name: "groupGorSingleAlert",
Type: config.NewPrometheusType(),
Interval: defaultEvalInterval,
Rules: []rule.Rule{VMRows},
},
{
File: "config/testdata/rules/rules0-good.rules",
Interval: defaultEvalInterval,
Type: config.NewPrometheusType(),
Name: "TestGroup",
Rules: []rule.Rule{
Conns,
ExampleAlertAlwaysFiring,
},
},
})
}
func compareGroups(t *testing.T, a, b *rule.Group) {
t.Helper()
if a.Name != b.Name {
@@ -285,82 +266,59 @@ func compareGroups(t *testing.T, a, b *rule.Group) {
}
}
func TestManagerUpdateNegative(t *testing.T) {
testCases := []struct {
notifiers []notifier.Notifier
rw remotewrite.RWClient
cfg config.Group
expErr string
}{
{
nil,
nil,
config.Group{
func TestManagerUpdate_Failure(t *testing.T) {
f := func(notifiers []notifier.Notifier, rw remotewrite.RWClient, cfg config.Group, errStrExpected string) {
t.Helper()
m := &manager{
groups: make(map[uint64]*rule.Group),
querierBuilder: &datasource.FakeQuerier{},
rw: rw,
}
if notifiers != nil {
m.notifiers = func() []notifier.Notifier { return notifiers }
}
err := m.update(context.Background(), []config.Group{cfg}, false)
if err == nil {
t.Fatalf("expected to get error; got nil")
}
errStr := err.Error()
if !strings.Contains(errStr, errStrExpected) {
t.Fatalf("missing %q in the error %q", errStrExpected, errStr)
}
}
f(nil, nil, config.Group{
Name: "Recording rule only",
Rules: []config.Rule{
{Record: "record", Expr: "max(up)"},
},
},
"contains recording rules",
},
{
nil,
nil,
config.Group{
}, "contains recording rules")
f(nil, nil, config.Group{
Name: "Alerting rule only",
Rules: []config.Rule{
{Alert: "alert", Expr: "up > 0"},
},
},
"contains alerting rules",
},
{
[]notifier.Notifier{&notifier.FakeNotifier{}},
nil,
config.Group{
}, "contains alerting rules")
f([]notifier.Notifier{&notifier.FakeNotifier{}}, nil, config.Group{
Name: "Recording and alerting rules",
Rules: []config.Rule{
{Alert: "alert1", Expr: "up > 0"},
{Alert: "alert2", Expr: "up > 0"},
{Record: "record", Expr: "max(up)"},
},
},
"contains recording rules",
},
{
nil,
&remotewrite.Client{},
config.Group{
}, "contains recording rules")
f(nil, &remotewrite.Client{}, config.Group{
Name: "Recording and alerting rules",
Rules: []config.Rule{
{Record: "record1", Expr: "max(up)"},
{Record: "record2", Expr: "max(up)"},
{Alert: "alert", Expr: "up > 0"},
},
},
"contains alerting rules",
},
}
for _, tc := range testCases {
t.Run(tc.cfg.Name, func(t *testing.T) {
m := &manager{
groups: make(map[uint64]*rule.Group),
querierBuilder: &datasource.FakeQuerier{},
rw: tc.rw,
}
if tc.notifiers != nil {
m.notifiers = func() []notifier.Notifier { return tc.notifiers }
}
err := m.update(context.Background(), []config.Group{tc.cfg}, false)
if err == nil {
t.Fatalf("expected to get error; got nil")
}
if !strings.Contains(err.Error(), tc.expErr) {
t.Fatalf("expected err to contain %q; got %q", tc.expErr, err)
}
})
}
}, "contains alerting rules")
}
func loadCfg(t *testing.T, path []string, validateAnnotations, validateExpressions bool) []config.Group {

@@ -11,7 +11,7 @@ import (
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
)
func TestAlert_ExecTemplate(t *testing.T) {
func TestAlertExecTemplate(t *testing.T) {
extLabels := make(map[string]string)
const (
extCluster = "prod"
@@ -23,159 +23,11 @@ func TestAlert_ExecTemplate(t *testing.T) {
_, err := Init(nil, extLabels, extURL)
checkErr(t, err)
testCases := []struct {
name string
alert *Alert
annotations map[string]string
expTpl map[string]string
}{
{
name: "empty-alert",
alert: &Alert{},
annotations: map[string]string{},
expTpl: map[string]string{},
},
{
name: "no-template",
alert: &Alert{
Value: 1e4,
Labels: map[string]string{
"instance": "localhost",
},
},
annotations: map[string]string{},
expTpl: map[string]string{},
},
{
name: "label-template",
alert: &Alert{
Value: 1e4,
Labels: map[string]string{
"job": "staging",
"instance": "localhost",
},
For: 5 * time.Minute,
},
annotations: map[string]string{
"summary": "Too high connection number for {{$labels.instance}} for job {{$labels.job}}",
"description": "It is {{ $value }} connections for {{$labels.instance}} for more than {{ .For }}",
},
expTpl: map[string]string{
"summary": "Too high connection number for localhost for job staging",
"description": "It is 10000 connections for localhost for more than 5m0s",
},
},
{
name: "expression-template",
alert: &Alert{
Expr: `vm_rows{"label"="bar"}<0`,
},
annotations: map[string]string{
"exprEscapedQuery": "{{ $expr|queryEscape }}",
"exprEscapedPath": "{{ $expr|pathEscape }}",
"exprEscapedJSON": "{{ $expr|jsonEscape }}",
"exprEscapedQuotes": "{{ $expr|quotesEscape }}",
"exprEscapedHTML": "{{ $expr|htmlEscape }}",
},
expTpl: map[string]string{
"exprEscapedQuery": "vm_rows%7B%22label%22%3D%22bar%22%7D%3C0",
"exprEscapedPath": "vm_rows%7B%22label%22=%22bar%22%7D%3C0",
"exprEscapedJSON": `"vm_rows{\"label\"=\"bar\"}\u003c0"`,
"exprEscapedQuotes": `vm_rows{\"label\"=\"bar\"}\u003c0`,
"exprEscapedHTML": "vm_rows{&quot;label&quot;=&quot;bar&quot;}&lt;0",
},
},
{
name: "query",
alert: &Alert{Expr: `vm_rows{"label"="bar"}>0`},
annotations: map[string]string{
"summary": `{{ query "foo" | first | value }}`,
"desc": `{{ range query "bar" }}{{ . | label "foo" }} {{ . | value }};{{ end }}`,
},
expTpl: map[string]string{
"summary": "1",
"desc": "bar 1;garply 2;",
},
},
{
name: "external",
alert: &Alert{
Value: 1e4,
Labels: map[string]string{
"job": "staging",
"instance": "localhost",
},
},
annotations: map[string]string{
"url": "{{ $externalURL }}",
"summary": "Issues with {{$labels.instance}} (dc-{{$externalLabels.dc}}) for job {{$labels.job}}",
"description": "It is {{ $value }} connections for {{$labels.instance}} (cluster-{{$externalLabels.cluster}})",
},
expTpl: map[string]string{
"url": extURL,
"summary": fmt.Sprintf("Issues with localhost (dc-%s) for job staging", extDC),
"description": fmt.Sprintf("It is 10000 connections for localhost (cluster-%s)", extCluster),
},
},
{
name: "alert and group IDs",
alert: &Alert{
ID: 42,
GroupID: 24,
},
annotations: map[string]string{
"url": "/api/v1/alert?alertID={{$alertID}}&groupID={{$groupID}}",
},
expTpl: map[string]string{
"url": "/api/v1/alert?alertID=42&groupID=24",
},
},
{
name: "ActiveAt time",
alert: &Alert{
ActiveAt: time.Date(2022, 8, 19, 20, 34, 58, 651387237, time.UTC),
},
annotations: map[string]string{
"diagram": "![](http://example.com?render={{$activeAt.Unix}}",
},
expTpl: map[string]string{
"diagram": "![](http://example.com?render=1660941298",
},
},
{
name: "ActiveAt time is nil",
alert: &Alert{},
annotations: map[string]string{
"default_time": "{{$activeAt}}",
},
expTpl: map[string]string{
"default_time": "0001-01-01 00:00:00 +0000 UTC",
},
},
{
name: "ActiveAt custom format",
alert: &Alert{
ActiveAt: time.Date(2022, 8, 19, 20, 34, 58, 651387237, time.UTC),
},
annotations: map[string]string{
"fire_time": `{{$activeAt.Format "2006/01/02 15:04:05"}}`,
},
expTpl: map[string]string{
"fire_time": "2022/08/19 20:34:58",
},
},
{
name: "ActiveAt query range",
alert: &Alert{
ActiveAt: time.Date(2022, 8, 19, 20, 34, 58, 651387237, time.UTC),
},
annotations: map[string]string{
"grafana_url": `vm-grafana.com?from={{($activeAt.Add (parseDurationTime "1h")).Unix}}&to={{($activeAt.Add (parseDurationTime "-1h")).Unix}}`,
},
expTpl: map[string]string{
"grafana_url": "vm-grafana.com?from=1660944898&to=1660937698",
},
},
f := func(alert *Alert, annotations map[string]string, tplExpected map[string]string) {
t.Helper()
if err := ValidateTemplates(annotations); err != nil {
t.Fatalf("cannot validate annotations: %s", err)
}
qFn := func(_ string) ([]datasource.Metric, error) {
@@ -198,26 +50,137 @@ func TestAlert_ExecTemplate(t *testing.T) {
},
}, nil
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
if err := ValidateTemplates(tc.annotations); err != nil {
t.Fatal(err)
}
tpl, err := tc.alert.ExecTemplate(qFn, tc.alert.Labels, tc.annotations)
tpl, err := alert.ExecTemplate(qFn, alert.Labels, annotations)
if err != nil {
t.Fatal(err)
t.Fatalf("cannot execute template: %s", err)
}
if len(tpl) != len(tc.expTpl) {
t.Fatalf("expected %d elements; got %d", len(tc.expTpl), len(tpl))
if len(tpl) != len(tplExpected) {
t.Fatalf("unexpected number of elements; got %d; want %d", len(tpl), len(tplExpected))
}
for k := range tc.expTpl {
got, exp := tpl[k], tc.expTpl[k]
for k := range tplExpected {
got, exp := tpl[k], tplExpected[k]
if got != exp {
t.Fatalf("expected %q=%q; got %q=%q", k, exp, k, got)
t.Fatalf("unexpected template for key=%q; got %q; want %q", k, got, exp)
}
}
}
// empty-alert
f(&Alert{}, map[string]string{}, map[string]string{})
// no-template
f(&Alert{
Value: 1e4,
Labels: map[string]string{
"instance": "localhost",
},
}, map[string]string{}, map[string]string{})
// label-template
f(&Alert{
Value: 1e4,
Labels: map[string]string{
"job": "staging",
"instance": "localhost",
},
For: 5 * time.Minute,
}, map[string]string{
"summary": "Too high connection number for {{$labels.instance}} for job {{$labels.job}}",
"description": "It is {{ $value }} connections for {{$labels.instance}} for more than {{ .For }}",
}, map[string]string{
"summary": "Too high connection number for localhost for job staging",
"description": "It is 10000 connections for localhost for more than 5m0s",
})
// expression-template
f(&Alert{
Expr: `vm_rows{"label"="bar"}<0`,
}, map[string]string{
"exprEscapedQuery": "{{ $expr|queryEscape }}",
"exprEscapedPath": "{{ $expr|pathEscape }}",
"exprEscapedJSON": "{{ $expr|jsonEscape }}",
"exprEscapedQuotes": "{{ $expr|quotesEscape }}",
"exprEscapedHTML": "{{ $expr|htmlEscape }}",
}, map[string]string{
"exprEscapedQuery": "vm_rows%7B%22label%22%3D%22bar%22%7D%3C0",
"exprEscapedPath": "vm_rows%7B%22label%22=%22bar%22%7D%3C0",
"exprEscapedJSON": `"vm_rows{\"label\"=\"bar\"}\u003c0"`,
"exprEscapedQuotes": `vm_rows{\"label\"=\"bar\"}\u003c0`,
"exprEscapedHTML": "vm_rows{&quot;label&quot;=&quot;bar&quot;}&lt;0",
})
// query
f(&Alert{
Expr: `vm_rows{"label"="bar"}>0`,
}, map[string]string{
"summary": `{{ query "foo" | first | value }}`,
"desc": `{{ range query "bar" }}{{ . | label "foo" }} {{ . | value }};{{ end }}`,
}, map[string]string{
"summary": "1",
"desc": "bar 1;garply 2;",
})
// external
f(&Alert{
Value: 1e4,
Labels: map[string]string{
"job": "staging",
"instance": "localhost",
},
}, map[string]string{
"url": "{{ $externalURL }}",
"summary": "Issues with {{$labels.instance}} (dc-{{$externalLabels.dc}}) for job {{$labels.job}}",
"description": "It is {{ $value }} connections for {{$labels.instance}} (cluster-{{$externalLabels.cluster}})",
}, map[string]string{
"url": extURL,
"summary": fmt.Sprintf("Issues with localhost (dc-%s) for job staging", extDC),
"description": fmt.Sprintf("It is 10000 connections for localhost (cluster-%s)", extCluster),
})
// alert and group IDs
f(&Alert{
ID: 42,
GroupID: 24,
}, map[string]string{
"url": "/api/v1/alert?alertID={{$alertID}}&groupID={{$groupID}}",
}, map[string]string{
"url": "/api/v1/alert?alertID=42&groupID=24",
})
// ActiveAt time
f(&Alert{
ActiveAt: time.Date(2022, 8, 19, 20, 34, 58, 651387237, time.UTC),
}, map[string]string{
"diagram": "![](http://example.com?render={{$activeAt.Unix}}",
}, map[string]string{
"diagram": "![](http://example.com?render=1660941298",
})
// ActiveAt time is nil
f(&Alert{}, map[string]string{
"default_time": "{{$activeAt}}",
}, map[string]string{
"default_time": "0001-01-01 00:00:00 +0000 UTC",
})
// ActiveAt custom format
f(&Alert{
ActiveAt: time.Date(2022, 8, 19, 20, 34, 58, 651387237, time.UTC),
}, map[string]string{
"fire_time": `{{$activeAt.Format "2006/01/02 15:04:05"}}`,
}, map[string]string{
"fire_time": "2022/08/19 20:34:58",
})
// ActiveAt query range
f(&Alert{
ActiveAt: time.Date(2022, 8, 19, 20, 34, 58, 651387237, time.UTC),
}, map[string]string{
"grafana_url": `vm-grafana.com?from={{($activeAt.Add (parseDurationTime "1h")).Unix}}&to={{($activeAt.Add (parseDurationTime "-1h")).Unix}}`,
}, map[string]string{
"grafana_url": "vm-grafana.com?from=1660944898&to=1660937698",
})
}
}
func TestAlert_toPromLabels(t *testing.T) {

@@ -16,10 +16,10 @@ func TestAlertManager_Addr(t *testing.T) {
const addr = "http://localhost"
am, err := NewAlertManager(addr, nil, promauth.HTTPClientConfig{}, nil, 0)
if err != nil {
t.Errorf("unexpected error: %s", err)
t.Fatalf("unexpected error: %s", err)
}
if am.Addr() != addr {
t.Errorf("expected to have %q; got %q", addr, am.Addr())
t.Fatalf("expected to have %q; got %q", addr, am.Addr())
}
}
@@ -28,21 +28,20 @@ func TestAlertManager_Send(t *testing.T) {
const headerKey, headerValue = "TenantID", "foo"
mux := http.NewServeMux()
mux.HandleFunc("/", func(_ http.ResponseWriter, _ *http.Request) {
t.Errorf("should not be called")
t.Fatalf("should not be called")
})
c := -1
mux.HandleFunc(alertManagerPath, func(w http.ResponseWriter, r *http.Request) {
user, pass, ok := r.BasicAuth()
if !ok {
t.Errorf("unauthorized request")
t.Fatalf("unauthorized request")
}
if user != baUser || pass != baPass {
t.Errorf("wrong creds %q:%q; expected %q:%q",
user, pass, baUser, baPass)
t.Fatalf("wrong creds %q:%q; expected %q:%q", user, pass, baUser, baPass)
}
c++
if r.Method != http.MethodPost {
t.Errorf("expected POST method got %s", r.Method)
t.Fatalf("expected POST method got %s", r.Method)
}
switch c {
case 0:
@ -59,25 +58,23 @@ func TestAlertManager_Send(t *testing.T) {
GeneratorURL string `json:"generatorURL"`
}
if err := json.NewDecoder(r.Body).Decode(&a); err != nil {
t.Errorf("can not unmarshal data into alert %s", err)
t.FailNow()
t.Fatalf("can not unmarshal data into alert %s", err)
}
if len(a) != 1 {
t.Errorf("expected 1 alert in array got %d", len(a))
t.Fatalf("expected 1 alert in array got %d", len(a))
}
if a[0].GeneratorURL != "0/0" {
t.Errorf("expected 0/0 as generatorURL got %s", a[0].GeneratorURL)
t.Fatalf("expected 0/0 as generatorURL got %s", a[0].GeneratorURL)
}
if a[0].StartsAt.IsZero() {
t.Errorf("expected non-zero start time")
t.Fatalf("expected non-zero start time")
}
if a[0].EndAt.IsZero() {
t.Errorf("expected non-zero end time")
t.Fatalf("expected non-zero end time")
}
case 3:
if r.Header.Get(headerKey) != headerValue {
t.Errorf("expected header %q to be set to %q; got %q instead",
headerKey, headerValue, r.Header.Get(headerKey))
t.Fatalf("expected header %q to be set to %q; got %q instead", headerKey, headerValue, r.Header.Get(headerKey))
}
}
})
@ -94,13 +91,13 @@ func TestAlertManager_Send(t *testing.T) {
return strconv.FormatUint(alert.GroupID, 10) + "/" + strconv.FormatUint(alert.ID, 10)
}, aCfg, nil, 0)
if err != nil {
t.Errorf("unexpected error: %s", err)
t.Fatalf("unexpected error: %s", err)
}
if err := am.Send(context.Background(), []Alert{{}, {}}, nil); err == nil {
t.Error("expected connection error got nil")
t.Fatalf("expected connection error got nil")
}
if err := am.Send(context.Background(), []Alert{}, nil); err == nil {
t.Error("expected wrong http code error got nil")
t.Fatalf("expected wrong http code error got nil")
}
if err := am.Send(context.Background(), []Alert{{
GroupID: 0,
@ -109,12 +106,12 @@ func TestAlertManager_Send(t *testing.T) {
End: time.Now().UTC(),
Annotations: map[string]string{"a": "b", "c": "d", "e": "f"},
}}, nil); err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
if c != 2 {
t.Errorf("expected 2 calls(count from zero) to server got %d", c)
t.Fatalf("expected 2 calls(count from zero) to server got %d", c)
}
if err := am.Send(context.Background(), nil, map[string]string{headerKey: headerValue}); err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
}


@ -5,10 +5,14 @@ import (
"testing"
)
func TestConfigParseGood(t *testing.T) {
func TestParseConfig_Success(t *testing.T) {
f := func(path string) {
t.Helper()
_, err := parseConfig(path)
checkErr(t, err)
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
}
f("testdata/mixed.good.yaml")
f("testdata/consul.good.yaml")
@ -16,14 +20,16 @@ func TestConfigParseGood(t *testing.T) {
f("testdata/static.good.yaml")
}
func TestConfigParseBad(t *testing.T) {
func TestParseConfig_Failure(t *testing.T) {
f := func(path, expErr string) {
t.Helper()
_, err := parseConfig(path)
if err == nil {
t.Fatalf("expected to get non-nil err for config %q", path)
}
if !strings.Contains(err.Error(), expErr) {
t.Errorf("expected err to contain %q; got %q instead", expErr, err)
t.Fatalf("expected err to contain %q; got %q instead", expErr, err)
}
}


@ -319,46 +319,41 @@ func TestMergeHTTPClientConfigs(t *testing.T) {
}
}
func TestParseLabels(t *testing.T) {
testCases := []struct {
name string
target string
cfg *Config
expectedAddress string
expectedErr bool
}{
{
"invalid address",
"invalid:*//url",
&Config{},
"",
true,
},
{
"use some default params",
"alertmanager:9093",
&Config{PathPrefix: "test"},
"http://alertmanager:9093/test/api/v2/alerts",
false,
},
{
"use target address",
"https://alertmanager:9093/api/v1/alerts",
&Config{Scheme: "http", PathPrefix: "test"},
"https://alertmanager:9093/api/v1/alerts",
false,
},
func TestParseLabels_Failure(t *testing.T) {
f := func(target string, cfg *Config) {
t.Helper()
_, _, err := parseLabels(target, nil, cfg)
if err == nil {
t.Fatalf("expecting non-nil error")
}
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
address, _, err := parseLabels(tc.target, nil, tc.cfg)
if err == nil == tc.expectedErr {
t.Fatalf("unexpected error; got %t; want %t", err != nil, tc.expectedErr)
}
if address != tc.expectedAddress {
t.Fatalf("unexpected address; got %q; want %q", address, tc.expectedAddress)
}
})
}
// invalid address
f("invalid:*//url", &Config{})
}
func TestParseLabels_Success(t *testing.T) {
f := func(target string, cfg *Config, expectedAddress string) {
t.Helper()
address, _, err := parseLabels(target, nil, cfg)
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
if address != expectedAddress {
t.Fatalf("unexpected address; got %q; want %q", address, expectedAddress)
}
}
// use some default params
f("alertmanager:9093", &Config{
PathPrefix: "test",
}, "http://alertmanager:9093/test/api/v2/alerts")
// use target address
f("https://alertmanager:9093/api/v1/alerts", &Config{
Scheme: "http",
PathPrefix: "test",
}, "https://alertmanager:9093/api/v1/alerts")
}


@ -17,12 +17,12 @@ func TestBlackHoleNotifier_Send(t *testing.T) {
End: time.Now().UTC(),
Annotations: map[string]string{"a": "b", "c": "d", "e": "f"},
}}, nil); err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
alertCount := bh.metrics.alertsSent.Get()
if alertCount != 1 {
t.Errorf("expect value 1; instead got %d", alertCount)
t.Fatalf("expect value 1; instead got %d", alertCount)
}
}
@ -35,7 +35,7 @@ func TestBlackHoleNotifier_Close(t *testing.T) {
End: time.Now().UTC(),
Annotations: map[string]string{"a": "b", "c": "d", "e": "f"},
}}, nil); err != nil {
t.Errorf("unexpected error %s", err)
t.Fatalf("unexpected error %s", err)
}
bh.Close()
@ -44,7 +44,7 @@ func TestBlackHoleNotifier_Close(t *testing.T) {
alertMetricName := "vmalert_alerts_sent_total{addr=\"blackhole\"}"
for _, name := range defaultMetrics.ListMetricNames() {
if name == alertMetricName {
t.Errorf("Metric name should have unregistered.But still present")
t.Fatalf("Metric name should have unregistered.But still present")
}
}
}


@ -44,8 +44,7 @@ func TestClient_Push(t *testing.T) {
}
r := rand.New(rand.NewSource(1))
const rowsN = 1e4
var sent int
const rowsN = int(1e4)
for i := 0; i < rowsN; i++ {
s := prompbmarshal.TimeSeries{
Samples: []prompbmarshal.Sample{{
@ -57,17 +56,11 @@ func TestClient_Push(t *testing.T) {
if err != nil {
t.Fatalf("unexpected err: %s", err)
}
if err == nil {
sent++
}
err = faultyClient.Push(s)
if err != nil {
t.Fatalf("unexpected err: %s", err)
}
}
if sent == 0 {
t.Fatalf("0 series sent")
}
if err := client.Close(); err != nil {
t.Fatalf("failed to close client: %s", err)
}
@ -75,37 +68,21 @@ func TestClient_Push(t *testing.T) {
t.Fatalf("failed to close faulty client: %s", err)
}
got := testSrv.accepted()
if got != sent {
t.Fatalf("expected to have %d series; got %d", sent, got)
if got != rowsN {
t.Fatalf("expected to have %d series; got %d", rowsN, got)
}
got = faultySrv.accepted()
if got != sent {
t.Fatalf("expected to have %d series for faulty client; got %d", sent, got)
if got != rowsN {
t.Fatalf("expected to have %d series for faulty client; got %d", rowsN, got)
}
}
func TestClient_run_maxBatchSizeDuringShutdown(t *testing.T) {
batchSize := 20
const batchSize = 20
testTable := []struct {
name string // name of the test case
pushCnt int // how many time series is pushed to the client
batchCnt int // the expected batch count sent by the client
}{
{
name: "pushCnt % batchSize == 0",
pushCnt: batchSize * 40,
batchCnt: 40,
},
{
name: "pushCnt % batchSize != 0",
pushCnt: batchSize*40 + 1,
batchCnt: 40 + 1,
},
}
f := func(pushCnt, batchCntExpected int) {
t.Helper()
for _, tt := range testTable {
t.Run(tt.name, func(t *testing.T) {
// run new server
bcServer := newBatchCntRWServer()
@ -122,30 +99,35 @@ func TestClient_run_maxBatchSizeDuringShutdown(t *testing.T) {
Addr: bcServer.URL,
})
if err != nil {
t.Fatalf("new remote write client failed, err: %v", err)
t.Fatalf("cannot create remote write client: %s", err)
}
// push time series to the client.
for i := 0; i < tt.pushCnt; i++ {
for i := 0; i < pushCnt; i++ {
if err = rwClient.Push(prompbmarshal.TimeSeries{}); err != nil {
t.Fatalf("push time series to the client failed, err: %v", err)
t.Fatalf("cannot time series to the client: %s", err)
}
}
// close the client so the rest ts will be flushed in `shutdown`
if err = rwClient.Close(); err != nil {
t.Fatalf("shutdown client failed, err: %v", err)
t.Fatalf("cannot shutdown client: %s", err)
}
// finally check how many batches is sent.
if tt.batchCnt != bcServer.acceptedBatches() {
t.Errorf("client sent batch count incorrect, want: %d, get: %d", tt.batchCnt, bcServer.acceptedBatches())
if bcServer.acceptedBatches() != batchCntExpected {
t.Fatalf("client sent batch count incorrect; got %d; want %d", bcServer.acceptedBatches(), batchCntExpected)
}
if tt.pushCnt != bcServer.accepted() {
t.Errorf("client sent time series count incorrect, want: %d, get: %d", tt.pushCnt, bcServer.accepted())
if pushCnt != bcServer.accepted() {
t.Fatalf("client sent time series count incorrect; got %d; want %d", bcServer.accepted(), pushCnt)
}
})
}
// pushCnt % batchSize == 0
f(batchSize*40, 40)
// pushCnt % batchSize != 0
f(batchSize*40+1, 40+1)
}
func newRWServer() *rwServer {


@ -39,36 +39,44 @@ func (fr *fakeReplayQuerier) QueryRange(_ context.Context, q string, from, to ti
}
func TestReplay(t *testing.T) {
testCases := []struct {
name string
from, to string
maxDP int
cfg []config.Group
qb *fakeReplayQuerier
}{
{
name: "one rule + one response",
from: "2021-01-01T12:00:00.000Z",
to: "2021-01-01T12:02:00.000Z",
maxDP: 10,
cfg: []config.Group{
f := func(from, to string, maxDP int, cfg []config.Group, qb *fakeReplayQuerier) {
t.Helper()
fromOrig, toOrig, maxDatapointsOrig := *replayFrom, *replayTo, *replayMaxDatapoints
retriesOrig, delayOrig := *replayRuleRetryAttempts, *replayRulesDelay
defer func() {
*replayFrom, *replayTo = fromOrig, toOrig
*replayMaxDatapoints, *replayRuleRetryAttempts = maxDatapointsOrig, retriesOrig
*replayRulesDelay = delayOrig
}()
*replayRuleRetryAttempts = 1
*replayRulesDelay = time.Millisecond
rwb := &remotewrite.DebugClient{}
*replayFrom = from
*replayTo = to
*replayMaxDatapoints = maxDP
if err := replay(cfg, qb, rwb); err != nil {
t.Fatalf("replay failed: %s", err)
}
if len(qb.registry) > 0 {
t.Fatalf("not all requests were sent: %#v", qb.registry)
}
}
// one rule + one response
f("2021-01-01T12:00:00.000Z", "2021-01-01T12:02:00.000Z", 10, []config.Group{
{Rules: []config.Rule{{Record: "foo", Expr: "sum(up)"}}},
},
qb: &fakeReplayQuerier{
}, &fakeReplayQuerier{
registry: map[string]map[string]struct{}{
"sum(up)": {"12:00:00+12:02:00": {}},
},
},
},
{
name: "one rule + multiple responses",
from: "2021-01-01T12:00:00.000Z",
to: "2021-01-01T12:02:30.000Z",
maxDP: 1,
cfg: []config.Group{
})
// one rule + multiple responses
f("2021-01-01T12:00:00.000Z", "2021-01-01T12:02:30.000Z", 1, []config.Group{
{Rules: []config.Rule{{Record: "foo", Expr: "sum(up)"}}},
},
qb: &fakeReplayQuerier{
}, &fakeReplayQuerier{
registry: map[string]map[string]struct{}{
"sum(up)": {
"12:00:00+12:01:00": {},
@ -76,17 +84,12 @@ func TestReplay(t *testing.T) {
"12:02:00+12:02:30": {},
},
},
},
},
{
name: "datapoints per step",
from: "2021-01-01T12:00:00.000Z",
to: "2021-01-01T15:02:30.000Z",
maxDP: 60,
cfg: []config.Group{
})
// datapoints per step
f("2021-01-01T12:00:00.000Z", "2021-01-01T15:02:30.000Z", 60, []config.Group{
{Interval: promutils.NewDuration(time.Minute), Rules: []config.Rule{{Record: "foo", Expr: "sum(up)"}}},
},
qb: &fakeReplayQuerier{
}, &fakeReplayQuerier{
registry: map[string]map[string]struct{}{
"sum(up)": {
"12:00:00+13:00:00": {},
@ -95,18 +98,13 @@ func TestReplay(t *testing.T) {
"15:00:00+15:02:30": {},
},
},
},
},
{
name: "multiple recording rules + multiple responses",
from: "2021-01-01T12:00:00.000Z",
to: "2021-01-01T12:02:30.000Z",
maxDP: 1,
cfg: []config.Group{
})
// multiple recording rules + multiple responses
f("2021-01-01T12:00:00.000Z", "2021-01-01T12:02:30.000Z", 1, []config.Group{
{Rules: []config.Rule{{Record: "foo", Expr: "sum(up)"}}},
{Rules: []config.Rule{{Record: "bar", Expr: "max(up)"}}},
},
qb: &fakeReplayQuerier{
}, &fakeReplayQuerier{
registry: map[string]map[string]struct{}{
"sum(up)": {
"12:00:00+12:01:00": {},
@ -119,18 +117,13 @@ func TestReplay(t *testing.T) {
"12:02:00+12:02:30": {},
},
},
},
},
{
name: "multiple alerting rules + multiple responses",
from: "2021-01-01T12:00:00.000Z",
to: "2021-01-01T12:02:30.000Z",
maxDP: 1,
cfg: []config.Group{
})
// multiple alerting rules + multiple responses
f("2021-01-01T12:00:00.000Z", "2021-01-01T12:02:30.000Z", 1, []config.Group{
{Rules: []config.Rule{{Alert: "foo", Expr: "sum(up) > 1"}}},
{Rules: []config.Rule{{Alert: "bar", Expr: "max(up) < 1"}}},
},
qb: &fakeReplayQuerier{
}, &fakeReplayQuerier{
registry: map[string]map[string]struct{}{
"sum(up) > 1": {
"12:00:00+12:01:00": {},
@ -143,31 +136,5 @@ func TestReplay(t *testing.T) {
"12:02:00+12:02:30": {},
},
},
},
},
}
from, to, maxDP := *replayFrom, *replayTo, *replayMaxDatapoints
retries, delay := *replayRuleRetryAttempts, *replayRulesDelay
defer func() {
*replayFrom, *replayTo = from, to
*replayMaxDatapoints, *replayRuleRetryAttempts = maxDP, retries
*replayRulesDelay = delay
}()
*replayRuleRetryAttempts = 1
*replayRulesDelay = time.Millisecond
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
*replayFrom = tc.from
*replayTo = tc.to
*replayMaxDatapoints = tc.maxDP
if err := replay(tc.cfg, tc.qb, &remotewrite.DebugClient{}); err != nil {
t.Fatalf("replay failed: %s", err)
}
if len(tc.qb.registry) > 0 {
t.Fatalf("not all requests were sent: %#v", tc.qb.registry)
}
})
}
}

File diff suppressed because it is too large


@ -37,19 +37,58 @@ func TestMain(m *testing.M) {
}
func TestUpdateWith(t *testing.T) {
testCases := []struct {
name string
currentRules []config.Rule
newRules []config.Rule
}{
{
"new rule",
nil,
[]config.Rule{{Alert: "bar"}},
},
{
"update alerting rule",
[]config.Rule{
f := func(currentRules, newRules []config.Rule) {
t.Helper()
g := &Group{
Name: "test",
}
qb := &datasource.FakeQuerier{}
for _, r := range currentRules {
r.ID = config.HashRule(r)
g.Rules = append(g.Rules, g.newRule(qb, r))
}
ng := &Group{
Name: "test",
}
for _, r := range newRules {
r.ID = config.HashRule(r)
ng.Rules = append(ng.Rules, ng.newRule(qb, r))
}
err := g.updateWith(ng)
if err != nil {
t.Fatalf("cannot update rule: %s", err)
}
if len(g.Rules) != len(newRules) {
t.Fatalf("expected to have %d rules; got: %d", len(g.Rules), len(newRules))
}
sort.Slice(g.Rules, func(i, j int) bool {
return g.Rules[i].ID() < g.Rules[j].ID()
})
sort.Slice(ng.Rules, func(i, j int) bool {
return ng.Rules[i].ID() < ng.Rules[j].ID()
})
for i, r := range g.Rules {
got, want := r, ng.Rules[i]
if got.ID() != want.ID() {
t.Fatalf("expected to have rule %q; got %q", want, got)
}
if err := CompareRules(t, got, want); err != nil {
t.Fatalf("comparison error: %s", err)
}
}
}
// new rule
f(nil, []config.Rule{
{Alert: "bar"},
})
// update alerting rule
f([]config.Rule{
{
Alert: "foo",
Expr: "up > 0",
@ -70,8 +109,7 @@ func TestUpdateWith(t *testing.T) {
"bar": "baz",
},
},
},
[]config.Rule{
}, []config.Rule{
{
Alert: "foo",
Expr: "up > 10",
@ -92,103 +130,49 @@ func TestUpdateWith(t *testing.T) {
"bar": "baz",
},
},
},
},
{
"update recording rule",
[]config.Rule{{
})
// update recording rule
f([]config.Rule{{
Record: "foo",
Expr: "max(up)",
Labels: map[string]string{
"bar": "baz",
},
}},
[]config.Rule{{
}}, []config.Rule{{
Record: "foo",
Expr: "min(up)",
Labels: map[string]string{
"baz": "bar",
},
}},
},
{
"empty rule",
[]config.Rule{{Alert: "foo"}, {Record: "bar"}},
nil,
},
{
"multiple rules",
[]config.Rule{
}})
// empty rule
f([]config.Rule{{Alert: "foo"}, {Record: "bar"}}, nil)
// multiple rules
f([]config.Rule{
{Alert: "bar"},
{Alert: "baz"},
{Alert: "foo"},
},
[]config.Rule{
}, []config.Rule{
{Alert: "baz"},
{Record: "foo"},
},
},
{
"replace rule",
[]config.Rule{{Alert: "foo1"}},
[]config.Rule{{Alert: "foo2"}},
},
{
"replace multiple rules",
[]config.Rule{
})
// replace rule
f([]config.Rule{{Alert: "foo1"}}, []config.Rule{{Alert: "foo2"}})
// replace multiple rules
f([]config.Rule{
{Alert: "foo1"},
{Record: "foo2"},
{Alert: "foo3"},
},
[]config.Rule{
}, []config.Rule{
{Alert: "foo3"},
{Alert: "foo4"},
{Record: "foo5"},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
g := &Group{Name: "test"}
qb := &datasource.FakeQuerier{}
for _, r := range tc.currentRules {
r.ID = config.HashRule(r)
g.Rules = append(g.Rules, g.newRule(qb, r))
}
ng := &Group{Name: "test"}
for _, r := range tc.newRules {
r.ID = config.HashRule(r)
ng.Rules = append(ng.Rules, ng.newRule(qb, r))
}
err := g.updateWith(ng)
if err != nil {
t.Fatal(err)
}
if len(g.Rules) != len(tc.newRules) {
t.Fatalf("expected to have %d rules; got: %d",
len(g.Rules), len(tc.newRules))
}
sort.Slice(g.Rules, func(i, j int) bool {
return g.Rules[i].ID() < g.Rules[j].ID()
})
sort.Slice(ng.Rules, func(i, j int) bool {
return ng.Rules[i].ID() < ng.Rules[j].ID()
})
for i, r := range g.Rules {
got, want := r, ng.Rules[i]
if got.ID() != want.ID() {
t.Fatalf("expected to have rule %q; got %q", want, got)
}
if err := CompareRules(t, got, want); err != nil {
t.Fatalf("comparison error: %s", err)
}
}
})
}
}
func TestGroupStart(t *testing.T) {
@ -312,30 +296,23 @@ func TestGroupStart(t *testing.T) {
<-finished
}
func TestResolveDuration(t *testing.T) {
testCases := []struct {
groupInterval time.Duration
maxDuration time.Duration
resendDelay time.Duration
expected time.Duration
}{
{time.Minute, 0, 0, 4 * time.Minute},
{time.Minute, 0, 2 * time.Minute, 8 * time.Minute},
{time.Minute, 4 * time.Minute, 4 * time.Minute, 4 * time.Minute},
{2 * time.Minute, time.Minute, 2 * time.Minute, time.Minute},
{time.Minute, 2 * time.Minute, 1 * time.Minute, 2 * time.Minute},
{2 * time.Minute, 0, 1 * time.Minute, 8 * time.Minute},
{0, 0, 0, 0},
func TestGetResolveDuration(t *testing.T) {
f := func(groupInterval, maxDuration, resendDelay, resultExpected time.Duration) {
t.Helper()
result := getResolveDuration(groupInterval, resendDelay, maxDuration)
if result != resultExpected {
t.Fatalf("unexpected result; got %s; want %s", result, resultExpected)
}
}
for _, tc := range testCases {
t.Run(fmt.Sprintf("%v-%v-%v", tc.groupInterval, tc.expected, tc.maxDuration), func(t *testing.T) {
got := getResolveDuration(tc.groupInterval, tc.resendDelay, tc.maxDuration)
if got != tc.expected {
t.Errorf("expected to have %v; got %v", tc.expected, got)
}
})
}
f(0, 0, 0, 0)
f(time.Minute, 0, 0, 4*time.Minute)
f(time.Minute, 0, 2*time.Minute, 8*time.Minute)
f(time.Minute, 4*time.Minute, 4*time.Minute, 4*time.Minute)
f(2*time.Minute, time.Minute, 2*time.Minute, time.Minute)
f(time.Minute, 2*time.Minute, 1*time.Minute, 2*time.Minute)
f(2*time.Minute, 0, 1*time.Minute, 8*time.Minute)
}
func TestGetStaleSeries(t *testing.T) {
@ -345,6 +322,7 @@ func TestGetStaleSeries(t *testing.T) {
}
f := func(r Rule, labels, expLabels [][]prompbmarshal.Label) {
t.Helper()
var tss []prompbmarshal.TimeSeries
for _, l := range labels {
tss = append(tss, newTimeSeriesPB([]float64{1}, []int64{ts.Unix()}, l))
@ -606,7 +584,7 @@ func TestGroupStartDelay(t *testing.T) {
delay := delayBeforeStart(at, key, g.Interval, g.EvalOffset)
gotStart := at.Add(delay)
if expTS != gotStart {
t.Errorf("expected to get %v; got %v instead", expTS, gotStart)
t.Fatalf("expected to get %v; got %v instead", expTS, gotStart)
}
}
@ -647,147 +625,75 @@ func TestGroupStartDelay(t *testing.T) {
}
func TestGetPrometheusReqTimestamp(t *testing.T) {
f := func(g *Group, tsOrigin, tsExpected string) {
t.Helper()
originT, _ := time.Parse(time.RFC3339, tsOrigin)
expT, _ := time.Parse(time.RFC3339, tsExpected)
gotTS := g.adjustReqTimestamp(originT)
if !gotTS.Equal(expT) {
t.Fatalf("get wrong prometheus request timestamp: %s; want %s", gotTS, expT)
}
}
offset := 30 * time.Minute
evalDelay := 1 * time.Minute
disableAlign := false
testCases := []struct {
name string
g *Group
originTS, expTS string
}{
{
"with query align + default evalDelay",
&Group{
// with query align + default evalDelay
f(&Group{
Interval: time.Hour,
},
"2023-08-28T11:11:00+00:00",
"2023-08-28T11:00:00+00:00",
},
{
"without query align + default evalDelay",
&Group{
}, "2023-08-28T11:11:00+00:00", "2023-08-28T11:00:00+00:00")
// without query align + default evalDelay
f(&Group{
Interval: time.Hour,
evalAlignment: &disableAlign,
},
"2023-08-28T11:11:00+00:00",
"2023-08-28T11:10:30+00:00",
},
{
"with eval_offset, find previous offset point + default evalDelay",
&Group{
}, "2023-08-28T11:11:00+00:00", "2023-08-28T11:10:30+00:00")
// with eval_offset, find previous offset point + default evalDelay
f(&Group{
EvalOffset: &offset,
Interval: time.Hour,
},
"2023-08-28T11:11:00+00:00",
"2023-08-28T10:30:00+00:00",
},
{
"with eval_offset + default evalDelay",
&Group{
}, "2023-08-28T11:11:00+00:00", "2023-08-28T10:30:00+00:00")
// with eval_offset + default evalDelay
f(&Group{
EvalOffset: &offset,
Interval: time.Hour,
},
"2023-08-28T11:41:00+00:00",
"2023-08-28T11:30:00+00:00",
},
{
"1h interval with eval_delay",
&Group{
}, "2023-08-28T11:41:00+00:00", "2023-08-28T11:30:00+00:00")
// 1h interval with eval_delay
f(&Group{
EvalDelay: &evalDelay,
Interval: time.Hour,
},
"2023-08-28T11:41:00+00:00",
"2023-08-28T11:00:00+00:00",
},
{
"1m interval with eval_delay",
&Group{
}, "2023-08-28T11:41:00+00:00", "2023-08-28T11:00:00+00:00")
// 1m interval with eval_delay
f(&Group{
EvalDelay: &evalDelay,
Interval: time.Minute,
},
"2023-08-28T11:41:13+00:00",
"2023-08-28T11:40:00+00:00",
},
{
"disable alignment with eval_delay",
&Group{
}, "2023-08-28T11:41:13+00:00", "2023-08-28T11:40:00+00:00")
// disable alignment with eval_delay
f(&Group{
EvalDelay: &evalDelay,
Interval: time.Hour,
evalAlignment: &disableAlign,
},
"2023-08-28T11:41:00+00:00",
"2023-08-28T11:40:00+00:00",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
originT, _ := time.Parse(time.RFC3339, tc.originTS)
expT, _ := time.Parse(time.RFC3339, tc.expTS)
gotTS := tc.g.adjustReqTimestamp(originT)
if !gotTS.Equal(expT) {
t.Fatalf("get wrong prometheus request timestamp, expect %s, got %s", expT, gotTS)
}
})
}
}, "2023-08-28T11:41:00+00:00", "2023-08-28T11:40:00+00:00")
}
func TestRangeIterator(t *testing.T) {
testCases := []struct {
ri rangeIterator
result [][2]time.Time
}{
{
ri: rangeIterator{
start: parseTime(t, "2021-01-01T12:00:00.000Z"),
end: parseTime(t, "2021-01-01T12:30:00.000Z"),
step: 5 * time.Minute,
},
result: [][2]time.Time{
{parseTime(t, "2021-01-01T12:00:00.000Z"), parseTime(t, "2021-01-01T12:05:00.000Z")},
{parseTime(t, "2021-01-01T12:05:00.000Z"), parseTime(t, "2021-01-01T12:10:00.000Z")},
{parseTime(t, "2021-01-01T12:10:00.000Z"), parseTime(t, "2021-01-01T12:15:00.000Z")},
{parseTime(t, "2021-01-01T12:15:00.000Z"), parseTime(t, "2021-01-01T12:20:00.000Z")},
{parseTime(t, "2021-01-01T12:20:00.000Z"), parseTime(t, "2021-01-01T12:25:00.000Z")},
{parseTime(t, "2021-01-01T12:25:00.000Z"), parseTime(t, "2021-01-01T12:30:00.000Z")},
},
},
{
ri: rangeIterator{
start: parseTime(t, "2021-01-01T12:00:00.000Z"),
end: parseTime(t, "2021-01-01T12:30:00.000Z"),
step: 45 * time.Minute,
},
result: [][2]time.Time{
{parseTime(t, "2021-01-01T12:00:00.000Z"), parseTime(t, "2021-01-01T12:30:00.000Z")},
{parseTime(t, "2021-01-01T12:30:00.000Z"), parseTime(t, "2021-01-01T12:30:00.000Z")},
},
},
{
ri: rangeIterator{
start: parseTime(t, "2021-01-01T12:00:12.000Z"),
end: parseTime(t, "2021-01-01T12:00:17.000Z"),
step: time.Second,
},
result: [][2]time.Time{
{parseTime(t, "2021-01-01T12:00:12.000Z"), parseTime(t, "2021-01-01T12:00:13.000Z")},
{parseTime(t, "2021-01-01T12:00:13.000Z"), parseTime(t, "2021-01-01T12:00:14.000Z")},
{parseTime(t, "2021-01-01T12:00:14.000Z"), parseTime(t, "2021-01-01T12:00:15.000Z")},
{parseTime(t, "2021-01-01T12:00:15.000Z"), parseTime(t, "2021-01-01T12:00:16.000Z")},
{parseTime(t, "2021-01-01T12:00:16.000Z"), parseTime(t, "2021-01-01T12:00:17.000Z")},
},
},
}
f := func(ri rangeIterator, resultExpected [][2]time.Time) {
t.Helper()
for i, tc := range testCases {
t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) {
var j int
for tc.ri.next() {
if len(tc.result) < j+1 {
t.Fatalf("unexpected result for iterator on step %d: %v - %v",
j, tc.ri.s, tc.ri.e)
for ri.next() {
if len(resultExpected) < j+1 {
t.Fatalf("unexpected result for iterator on step %d: %v - %v", j, ri.s, ri.e)
}
s, e := tc.ri.s, tc.ri.e
expS, expE := tc.result[j][0], tc.result[j][1]
s, e := ri.s, ri.e
expS, expE := resultExpected[j][0], resultExpected[j][1]
if s != expS {
t.Fatalf("expected to get start=%v; got %v", expS, s)
}
@ -796,8 +702,41 @@ func TestRangeIterator(t *testing.T) {
}
j++
}
})
}
f(rangeIterator{
start: parseTime(t, "2021-01-01T12:00:00.000Z"),
end: parseTime(t, "2021-01-01T12:30:00.000Z"),
step: 5 * time.Minute,
}, [][2]time.Time{
{parseTime(t, "2021-01-01T12:00:00.000Z"), parseTime(t, "2021-01-01T12:05:00.000Z")},
{parseTime(t, "2021-01-01T12:05:00.000Z"), parseTime(t, "2021-01-01T12:10:00.000Z")},
{parseTime(t, "2021-01-01T12:10:00.000Z"), parseTime(t, "2021-01-01T12:15:00.000Z")},
{parseTime(t, "2021-01-01T12:15:00.000Z"), parseTime(t, "2021-01-01T12:20:00.000Z")},
{parseTime(t, "2021-01-01T12:20:00.000Z"), parseTime(t, "2021-01-01T12:25:00.000Z")},
{parseTime(t, "2021-01-01T12:25:00.000Z"), parseTime(t, "2021-01-01T12:30:00.000Z")},
})
f(rangeIterator{
start: parseTime(t, "2021-01-01T12:00:00.000Z"),
end: parseTime(t, "2021-01-01T12:30:00.000Z"),
step: 45 * time.Minute,
}, [][2]time.Time{
{parseTime(t, "2021-01-01T12:00:00.000Z"), parseTime(t, "2021-01-01T12:30:00.000Z")},
{parseTime(t, "2021-01-01T12:30:00.000Z"), parseTime(t, "2021-01-01T12:30:00.000Z")},
})
f(rangeIterator{
start: parseTime(t, "2021-01-01T12:00:12.000Z"),
end: parseTime(t, "2021-01-01T12:00:17.000Z"),
step: time.Second,
}, [][2]time.Time{
{parseTime(t, "2021-01-01T12:00:12.000Z"), parseTime(t, "2021-01-01T12:00:13.000Z")},
{parseTime(t, "2021-01-01T12:00:13.000Z"), parseTime(t, "2021-01-01T12:00:14.000Z")},
{parseTime(t, "2021-01-01T12:00:14.000Z"), parseTime(t, "2021-01-01T12:00:15.000Z")},
{parseTime(t, "2021-01-01T12:00:15.000Z"), parseTime(t, "2021-01-01T12:00:16.000Z")},
{parseTime(t, "2021-01-01T12:00:16.000Z"), parseTime(t, "2021-01-01T12:00:17.000Z")},
})
}
func parseTime(t *testing.T, s string) time.Time {


@ -13,31 +13,43 @@ import (
)
func TestRecordingRule_Exec(t *testing.T) {
f := func(rule *RecordingRule, metrics []datasource.Metric, tssExpected []prompbmarshal.TimeSeries) {
t.Helper()
fq := &datasource.FakeQuerier{}
fq.Add(metrics...)
rule.q = fq
rule.state = &ruleState{
entries: make([]StateEntry, 10),
}
tss, err := rule.exec(context.TODO(), time.Now(), 0)
if err != nil {
t.Fatalf("unexpected RecordingRule.exec error: %s", err)
}
if err := compareTimeSeries(t, tssExpected, tss); err != nil {
t.Fatalf("timeseries missmatch: %s", err)
}
}
timestamp := time.Now()
testCases := []struct {
rule *RecordingRule
metrics []datasource.Metric
expTS []prompbmarshal.TimeSeries
}{
{
&RecordingRule{Name: "foo"},
[]datasource.Metric{metricWithValueAndLabels(t, 10,
"__name__", "bar",
)},
[]prompbmarshal.TimeSeries{
f(&RecordingRule{
Name: "foo",
}, []datasource.Metric{
metricWithValueAndLabels(t, 10, "__name__", "bar"),
}, []prompbmarshal.TimeSeries{
newTimeSeries([]float64{10}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": "foo",
}),
},
},
{
&RecordingRule{Name: "foobarbaz"},
[]datasource.Metric{
})
f(&RecordingRule{
Name: "foobarbaz",
}, []datasource.Metric{
metricWithValueAndLabels(t, 1, "__name__", "foo", "job", "foo"),
metricWithValueAndLabels(t, 2, "__name__", "bar", "job", "bar"),
metricWithValueAndLabels(t, 3, "__name__", "baz", "job", "baz"),
},
[]prompbmarshal.TimeSeries{
}, []prompbmarshal.TimeSeries{
newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": "foobarbaz",
"job": "foo",
@ -50,20 +62,17 @@ func TestRecordingRule_Exec(t *testing.T) {
"__name__": "foobarbaz",
"job": "baz",
}),
},
},
{
&RecordingRule{
})
f(&RecordingRule{
Name: "job:foo",
Labels: map[string]string{
"source": "test",
},
},
[]datasource.Metric{
}, []datasource.Metric{
metricWithValueAndLabels(t, 2, "__name__", "foo", "job", "foo"),
metricWithValueAndLabels(t, 1, "__name__", "bar", "job", "bar", "source", "origin"),
},
[]prompbmarshal.TimeSeries{
}, []prompbmarshal.TimeSeries{
newTimeSeries([]float64{2}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": "job:foo",
"job": "foo",
@ -75,54 +84,44 @@ func TestRecordingRule_Exec(t *testing.T) {
"source": "test",
"exported_source": "origin",
}),
},
},
}
for _, tc := range testCases {
t.Run(tc.rule.Name, func(t *testing.T) {
fq := &datasource.FakeQuerier{}
fq.Add(tc.metrics...)
tc.rule.q = fq
tc.rule.state = &ruleState{entries: make([]StateEntry, 10)}
tss, err := tc.rule.exec(context.TODO(), time.Now(), 0)
if err != nil {
t.Fatalf("unexpected Exec err: %s", err)
}
if err := compareTimeSeries(t, tc.expTS, tss); err != nil {
t.Fatalf("timeseries missmatch: %s", err)
}
})
}
}
func TestRecordingRule_ExecRange(t *testing.T) {
f := func(rule *RecordingRule, metrics []datasource.Metric, tssExpected []prompbmarshal.TimeSeries) {
t.Helper()
fq := &datasource.FakeQuerier{}
fq.Add(metrics...)
rule.q = fq
tss, err := rule.execRange(context.TODO(), time.Now(), time.Now())
if err != nil {
t.Fatalf("unexpected RecordingRule.execRange error: %s", err)
}
if err := compareTimeSeries(t, tssExpected, tss); err != nil {
t.Fatalf("timeseries missmatch: %s", err)
}
}
timestamp := time.Now()
testCases := []struct {
rule *RecordingRule
metrics []datasource.Metric
expTS []prompbmarshal.TimeSeries
}{
{
&RecordingRule{Name: "foo"},
[]datasource.Metric{metricWithValuesAndLabels(t, []float64{10, 20, 30},
"__name__", "bar",
)},
[]prompbmarshal.TimeSeries{
newTimeSeries([]float64{10, 20, 30},
[]int64{timestamp.UnixNano(), timestamp.UnixNano(), timestamp.UnixNano()},
map[string]string{
f(&RecordingRule{
Name: "foo",
}, []datasource.Metric{
metricWithValuesAndLabels(t, []float64{10, 20, 30}, "__name__", "bar"),
}, []prompbmarshal.TimeSeries{
newTimeSeries([]float64{10, 20, 30}, []int64{timestamp.UnixNano(), timestamp.UnixNano(), timestamp.UnixNano()}, map[string]string{
"__name__": "foo",
}),
},
},
{
&RecordingRule{Name: "foobarbaz"},
[]datasource.Metric{
})
f(&RecordingRule{
Name: "foobarbaz",
}, []datasource.Metric{
metricWithValuesAndLabels(t, []float64{1}, "__name__", "foo", "job", "foo"),
metricWithValuesAndLabels(t, []float64{2, 3}, "__name__", "bar", "job", "bar"),
metricWithValuesAndLabels(t, []float64{4, 5, 6}, "__name__", "baz", "job", "baz"),
},
[]prompbmarshal.TimeSeries{
}, []prompbmarshal.TimeSeries{
newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": "foobarbaz",
"job": "foo",
@ -132,22 +131,21 @@ func TestRecordingRule_ExecRange(t *testing.T) {
"job": "bar",
}),
newTimeSeries([]float64{4, 5, 6},
[]int64{timestamp.UnixNano(), timestamp.UnixNano(), timestamp.UnixNano()},
map[string]string{
[]int64{timestamp.UnixNano(), timestamp.UnixNano(), timestamp.UnixNano()}, map[string]string{
"__name__": "foobarbaz",
"job": "baz",
}),
},
},
{
&RecordingRule{Name: "job:foo", Labels: map[string]string{
})
f(&RecordingRule{
Name: "job:foo",
Labels: map[string]string{
"source": "test",
}},
[]datasource.Metric{
},
}, []datasource.Metric{
metricWithValueAndLabels(t, 2, "__name__", "foo", "job", "foo"),
metricWithValueAndLabels(t, 1, "__name__", "bar", "job", "bar"),
},
[]prompbmarshal.TimeSeries{
}, []prompbmarshal.TimeSeries{
newTimeSeries([]float64{2}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": "job:foo",
"job": "foo",
@ -158,51 +156,22 @@ func TestRecordingRule_ExecRange(t *testing.T) {
"job": "bar",
"source": "test",
}),
},
},
}
for _, tc := range testCases {
t.Run(tc.rule.Name, func(t *testing.T) {
fq := &datasource.FakeQuerier{}
fq.Add(tc.metrics...)
tc.rule.q = fq
tss, err := tc.rule.execRange(context.TODO(), time.Now(), time.Now())
if err != nil {
t.Fatalf("unexpected Exec err: %s", err)
}
if err := compareTimeSeries(t, tc.expTS, tss); err != nil {
t.Fatalf("timeseries missmatch: %s", err)
}
})
}
}
func TestRecordingRuleLimit(t *testing.T) {
timestamp := time.Now()
testCases := []struct {
limit int
err string
}{
{
limit: 0,
},
{
limit: -1,
},
{
limit: 1,
err: "exec exceeded limit of 1 with 3 series",
},
{
limit: 2,
err: "exec exceeded limit of 2 with 3 series",
},
}
func TestRecordingRuleLimit_Failure(t *testing.T) {
f := func(limit int, errStrExpected string) {
t.Helper()
testMetrics := []datasource.Metric{
metricWithValuesAndLabels(t, []float64{1}, "__name__", "foo", "job", "foo"),
metricWithValuesAndLabels(t, []float64{2, 3}, "__name__", "bar", "job", "bar"),
metricWithValuesAndLabels(t, []float64{4, 5, 6}, "__name__", "baz", "job", "baz"),
}
fq := &datasource.FakeQuerier{}
fq.Add(testMetrics...)
rule := &RecordingRule{Name: "job:foo",
state: &ruleState{entries: make([]StateEntry, 10)},
Labels: map[string]string{
@ -212,19 +181,57 @@ func TestRecordingRuleLimit(t *testing.T) {
errors: utils.GetOrCreateCounter(`vmalert_recording_rules_errors_total{alertname="job:foo"}`),
},
}
var err error
for _, testCase := range testCases {
fq := &datasource.FakeQuerier{}
fq.Add(testMetrics...)
rule.q = fq
_, err = rule.exec(context.TODO(), timestamp, testCase.limit)
if err != nil && !strings.EqualFold(err.Error(), testCase.err) {
t.Fatal(err)
_, err := rule.exec(context.TODO(), time.Now(), limit)
if err == nil {
t.Fatalf("expecting non-nil error")
}
errStr := err.Error()
if !strings.Contains(errStr, errStrExpected) {
t.Fatalf("missing %q in the error %q", errStrExpected, errStr)
}
}
f(1, "exec exceeded limit of 1 with 3 series")
f(2, "exec exceeded limit of 2 with 3 series")
}
func TestRecordingRule_ExecNegative(t *testing.T) {
func TestRecordingRuleLimit_Success(t *testing.T) {
f := func(limit int) {
t.Helper()
testMetrics := []datasource.Metric{
metricWithValuesAndLabels(t, []float64{1}, "__name__", "foo", "job", "foo"),
metricWithValuesAndLabels(t, []float64{2, 3}, "__name__", "bar", "job", "bar"),
metricWithValuesAndLabels(t, []float64{4, 5, 6}, "__name__", "baz", "job", "baz"),
}
fq := &datasource.FakeQuerier{}
fq.Add(testMetrics...)
rule := &RecordingRule{Name: "job:foo",
state: &ruleState{entries: make([]StateEntry, 10)},
Labels: map[string]string{
"source": "test_limit",
},
metrics: &recordingRuleMetrics{
errors: utils.GetOrCreateCounter(`vmalert_recording_rules_errors_total{alertname="job:foo"}`),
},
}
rule.q = fq
_, err := rule.exec(context.TODO(), time.Now(), limit)
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
}
f(0)
f(-1)
}
func TestRecordingRuleExec_Negative(t *testing.T) {
rr := &RecordingRule{
Name: "job:foo",
Labels: map[string]string{
@ -256,6 +263,6 @@ func TestRecordingRule_ExecNegative(t *testing.T) {
_, err = rr.exec(context.TODO(), time.Now(), 0)
if err != nil {
t.Fatal(err)
t.Fatalf("cannot execute recroding rule: %s", err)
}
}


@ -7,10 +7,11 @@ import (
textTpl "text/template"
)
func TestTemplateFuncs(t *testing.T) {
funcs := templateFuncs()
func TestTemplateFuncs_StringConversion(t *testing.T) {
f := func(funcName, s, resultExpected string) {
t.Helper()
funcs := templateFuncs()
v := funcs[funcName]
fLocal := v.(func(s string) string)
result := fLocal(s)
@ -18,6 +19,7 @@ func TestTemplateFuncs(t *testing.T) {
t.Fatalf("unexpected result for %s(%q); got\n%s\nwant\n%s", funcName, s, result, resultExpected)
}
}
f("title", "foo bar", "Foo Bar")
f("toUpper", "foo", "FOO")
f("toLower", "FOO", "foo")
@ -31,7 +33,10 @@ func TestTemplateFuncs(t *testing.T) {
f("stripPort", "foo:1234", "foo")
f("stripDomain", "foo.bar.baz", "foo")
f("stripDomain", "foo.bar:123", "foo:123")
}
func TestTemplateFuncs_Match(t *testing.T) {
funcs := templateFuncs()
// check "match" func
matchFunc := funcs["match"].(func(pattern, s string) (bool, error))
if _, err := matchFunc("invalid[regexp", "abc"); err == nil {
@ -51,9 +56,13 @@ func TestTemplateFuncs(t *testing.T) {
if !ok {
t.Fatalf("unexpected mismatch")
}
}
formatting := func(funcName string, p any, resultExpected string) {
func TestTemplateFuncs_Formatting(t *testing.T) {
f := func(funcName string, p any, resultExpected string) {
t.Helper()
funcs := templateFuncs()
v := funcs[funcName]
fLocal := v.(func(s any) (string, error))
result, err := fLocal(p)
@ -64,32 +73,33 @@ func TestTemplateFuncs(t *testing.T) {
t.Fatalf("unexpected result for %s(%f); got\n%s\nwant\n%s", funcName, p, result, resultExpected)
}
}
formatting("humanize1024", float64(0), "0")
formatting("humanize1024", math.Inf(0), "+Inf")
formatting("humanize1024", math.NaN(), "NaN")
formatting("humanize1024", float64(127087), "124.1ki")
formatting("humanize1024", float64(130137088), "124.1Mi")
formatting("humanize1024", float64(133260378112), "124.1Gi")
formatting("humanize1024", float64(136458627186688), "124.1Ti")
formatting("humanize1024", float64(139733634239168512), "124.1Pi")
formatting("humanize1024", float64(143087241460908556288), "124.1Ei")
formatting("humanize1024", float64(146521335255970361638912), "124.1Zi")
formatting("humanize1024", float64(150037847302113650318245888), "124.1Yi")
formatting("humanize1024", float64(153638755637364377925883789312), "1.271e+05Yi")
formatting("humanize", float64(127087), "127.1k")
formatting("humanize", float64(136458627186688), "136.5T")
f("humanize1024", float64(0), "0")
f("humanize1024", math.Inf(0), "+Inf")
f("humanize1024", math.NaN(), "NaN")
f("humanize1024", float64(127087), "124.1ki")
f("humanize1024", float64(130137088), "124.1Mi")
f("humanize1024", float64(133260378112), "124.1Gi")
f("humanize1024", float64(136458627186688), "124.1Ti")
f("humanize1024", float64(139733634239168512), "124.1Pi")
f("humanize1024", float64(143087241460908556288), "124.1Ei")
f("humanize1024", float64(146521335255970361638912), "124.1Zi")
f("humanize1024", float64(150037847302113650318245888), "124.1Yi")
f("humanize1024", float64(153638755637364377925883789312), "1.271e+05Yi")
formatting("humanizeDuration", 1, "1s")
formatting("humanizeDuration", 0.2, "200ms")
formatting("humanizeDuration", 42000, "11h 40m 0s")
formatting("humanizeDuration", 16790555, "194d 8h 2m 35s")
f("humanize", float64(127087), "127.1k")
f("humanize", float64(136458627186688), "136.5T")
formatting("humanizePercentage", 1, "100%")
formatting("humanizePercentage", 0.8, "80%")
formatting("humanizePercentage", 0.015, "1.5%")
f("humanizeDuration", 1, "1s")
f("humanizeDuration", 0.2, "200ms")
f("humanizeDuration", 42000, "11h 40m 0s")
f("humanizeDuration", 16790555, "194d 8h 2m 35s")
formatting("humanizeTimestamp", 1679055557, "2023-03-17 12:19:17 +0000 UTC")
f("humanizePercentage", 1, "100%")
f("humanizePercentage", 0.8, "80%")
f("humanizePercentage", 0.015, "1.5%")
f("humanizeTimestamp", 1679055557, "2023-03-17 12:19:17 +0000 UTC")
}
func mkTemplate(current, replacement any) textTemplate {
@ -138,50 +148,82 @@ func equalTemplates(tmpls ...*textTpl.Template) bool {
return true
}
func TestTemplates_Load(t *testing.T) {
testCases := []struct {
name string
initialTemplate textTemplate
pathPatterns []string
overwrite bool
expectedTemplate textTemplate
expErr string
}{
{
"non existing path undefined template override",
mkTemplate(nil, nil),
[]string{
func TestTemplatesLoad_Failure(t *testing.T) {
f := func(pathPatterns []string, expectedErrStr string) {
t.Helper()
err := Load(pathPatterns, false)
if err == nil {
t.Fatalf("expecting non-nil error")
}
errStr := err.Error()
if !strings.Contains(errStr, expectedErrStr) {
t.Fatalf("the returned error %q doesn't contain %q", errStr, expectedErrStr)
}
}
// load template with syntax error
f([]string{
"templates/other/nested/bad0-*.tpl",
"templates/test/good0-*.tpl",
}, "failed to parse template glob")
}
func TestTemplatesLoad_Success(t *testing.T) {
f := func(initialTmpl textTemplate, pathPatterns []string, overwrite bool, expectedTmpl textTemplate) {
t.Helper()
masterTmplOrig := masterTmpl
masterTmpl = initialTmpl
defer func() {
masterTmpl = masterTmplOrig
}()
if err := Load(pathPatterns, overwrite); err != nil {
t.Fatalf("cannot load templates: %s", err)
}
if !equalTemplates(masterTmpl.replacement, expectedTmpl.replacement) {
t.Fatalf("unexpected replacement template\ngot\n%+v\nwant\n%+v", masterTmpl.replacement, expectedTmpl.replacement)
}
if !equalTemplates(masterTmpl.current, expectedTmpl.current) {
t.Fatalf("unexpected current template\ngot\n%+v\nwant\n%+v", masterTmpl.current, expectedTmpl.current)
}
}
// non existing path undefined template override
initialTmpl := mkTemplate(nil, nil)
pathPatterns := []string{
"templates/non-existing/good-*.tpl",
"templates/absent/good-*.tpl",
},
true,
mkTemplate(``, nil),
"",
},
{
"non existing path defined template override",
mkTemplate(`
}
overwrite := true
expectedTmpl := mkTemplate(``, nil)
f(initialTmpl, pathPatterns, overwrite, expectedTmpl)
// non existing path defined template override
initialTmpl = mkTemplate(`
{{- define "test.1" -}}
{{- printf "value" -}}
{{- end -}}
`, nil),
[]string{
`, nil)
pathPatterns = []string{
"templates/non-existing/good-*.tpl",
"templates/absent/good-*.tpl",
},
true,
mkTemplate(``, nil),
"",
},
{
"existing path undefined template override",
mkTemplate(nil, nil),
[]string{
}
overwrite = true
expectedTmpl = mkTemplate(``, nil)
f(initialTmpl, pathPatterns, overwrite, expectedTmpl)
// existing path undefined template override
initialTmpl = mkTemplate(nil, nil)
pathPatterns = []string{
"templates/other/nested/good0-*.tpl",
"templates/test/good0-*.tpl",
},
false,
mkTemplate(`
}
overwrite = false
expectedTmpl = mkTemplate(`
{{- define "good0-test.tpl" -}}{{- end -}}
{{- define "test.0" -}}
{{ printf "Hello %s!" externalURL }}
@ -195,22 +237,21 @@ func TestTemplates_Load(t *testing.T) {
{{- define "test.3" -}}
{{ printf "Hello %s!" externalURL }}
{{- end -}}
`, nil),
"",
},
{
"existing path defined template override",
mkTemplate(`
`, nil)
f(initialTmpl, pathPatterns, overwrite, expectedTmpl)
// existing path defined template override
initialTmpl = mkTemplate(`
{{- define "test.1" -}}
{{ printf "Hello %s!" "world" }}
{{- end -}}
`, nil),
[]string{
`, nil)
pathPatterns = []string{
"templates/other/nested/good0-*.tpl",
"templates/test/good0-*.tpl",
},
false,
mkTemplate(`
}
overwrite = false
expectedTmpl = mkTemplate(`
{{- define "good0-test.tpl" -}}{{- end -}}
{{- define "test.0" -}}
{{ printf "Hello %s!" externalURL }}
@ -238,95 +279,57 @@ func TestTemplates_Load(t *testing.T) {
{{- define "test.3" -}}
{{ printf "Hello %s!" externalURL }}
{{- end -}}
`),
"",
},
{
"load template with syntax error",
mkTemplate(`
{{- define "test.1" -}}
{{ printf "Hello %s!" "world" }}
{{- end -}}
`, nil),
[]string{
"templates/other/nested/bad0-*.tpl",
"templates/test/good0-*.tpl",
},
false,
mkTemplate(`
{{- define "test.1" -}}
{{ printf "Hello %s!" "world" }}
{{- end -}}
`, nil),
"failed to parse template glob",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
masterTmpl = tc.initialTemplate
err := Load(tc.pathPatterns, tc.overwrite)
if tc.expErr == "" && err != nil {
t.Error("happened error that wasn't expected: %w", err)
}
if tc.expErr != "" && err == nil {
t.Error("%+w", err)
t.Error("expected error that didn't happened")
}
if err != nil && !strings.Contains(err.Error(), tc.expErr) {
t.Error("%+w", err)
t.Error("expected string doesn't exist in error message")
}
if !equalTemplates(masterTmpl.replacement, tc.expectedTemplate.replacement) {
t.Fatalf("replacement template is not as expected")
}
if !equalTemplates(masterTmpl.current, tc.expectedTemplate.current) {
t.Fatalf("current template is not as expected")
}
})
}
`)
f(initialTmpl, pathPatterns, overwrite, expectedTmpl)
}
func TestTemplates_Reload(t *testing.T) {
testCases := []struct {
name string
initialTemplate textTemplate
expectedTemplate textTemplate
}{
{
"empty current and replacement templates",
mkTemplate(nil, nil),
mkTemplate(nil, nil),
},
{
"empty current template only",
mkTemplate(`
func TestTemplatesReload(t *testing.T) {
f := func(initialTmpl, expectedTmpl textTemplate) {
t.Helper()
masterTmplOrig := masterTmpl
masterTmpl = initialTmpl
defer func() {
masterTmpl = masterTmplOrig
}()
Reload()
if !equalTemplates(masterTmpl.replacement, expectedTmpl.replacement) {
t.Fatalf("unexpected replacement template\ngot\n%+v\nwant\n%+v", masterTmpl.replacement, expectedTmpl.replacement)
}
if !equalTemplates(masterTmpl.current, expectedTmpl.current) {
t.Fatalf("unexpected current template\ngot\n%+v\nwant\n%+v", masterTmpl.current, expectedTmpl.current)
}
}
// empty current and replacement templates
f(mkTemplate(nil, nil), mkTemplate(nil, nil))
// empty current template only
f(mkTemplate(`
{{- define "test.1" -}}
{{- printf "value" -}}
{{- end -}}
`, nil),
mkTemplate(`
`, nil), mkTemplate(`
{{- define "test.1" -}}
{{- printf "value" -}}
{{- end -}}
`, nil),
},
{
"empty replacement template only",
mkTemplate(nil, `
`, nil))
// empty replacement template only
f(mkTemplate(nil, `
{{- define "test.1" -}}
{{- printf "value" -}}
{{- end -}}
`),
mkTemplate(`
`), mkTemplate(`
{{- define "test.1" -}}
{{- printf "value" -}}
{{- end -}}
`, nil),
},
{
"defined both templates",
mkTemplate(`
`, nil))
// defined both templates
f(mkTemplate(`
{{- define "test.0" -}}
{{- printf "value" -}}
{{- end -}}
@ -337,25 +340,9 @@ func TestTemplates_Reload(t *testing.T) {
{{- define "test.1" -}}
{{- printf "after" -}}
{{- end -}}
`),
mkTemplate(`
`), mkTemplate(`
{{- define "test.1" -}}
{{- printf "after" -}}
{{- end -}}
`, nil),
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
masterTmpl = tc.initialTemplate
Reload()
if !equalTemplates(masterTmpl.replacement, tc.expectedTemplate.replacement) {
t.Fatalf("replacement template is not as expected")
}
if !equalTemplates(masterTmpl.current, tc.expectedTemplate.current) {
t.Fatalf("current template is not as expected")
}
})
}
`, nil))
}


@ -7,35 +7,31 @@ import (
)
func TestErrGroup(t *testing.T) {
testCases := []struct {
errs []error
exp string
}{
{nil, ""},
{[]error{errors.New("timeout")}, "errors(1): timeout"},
{
[]error{errors.New("timeout"), errors.New("deadline")},
"errors(2): timeout\ndeadline",
},
}
for _, tc := range testCases {
eg := new(ErrGroup)
for _, err := range tc.errs {
f := func(errs []error, resultExpected string) {
t.Helper()
eg := &ErrGroup{}
for _, err := range errs {
eg.Add(err)
}
if len(tc.errs) == 0 {
if len(errs) == 0 {
if eg.Err() != nil {
t.Fatalf("expected to get nil error")
}
continue
return
}
if eg.Err() == nil {
t.Fatalf("expected to get non-nil error")
}
if eg.Error() != tc.exp {
t.Fatalf("expected to have: \n%q\ngot:\n%q", tc.exp, eg.Error())
result := eg.Error()
if result != resultExpected {
t.Fatalf("unexpected result\ngot\n%v\nwant\n%v", result, resultExpected)
}
}
f(nil, "")
f([]error{errors.New("timeout")}, "errors(1): timeout")
f([]error{errors.New("timeout"), errors.New("deadline")}, "errors(2): timeout\ndeadline")
}
// TestErrGroupConcurrent supposed to test concurrent


@ -43,16 +43,16 @@ func TestHandler(t *testing.T) {
t.Fatalf("unexpected err %s", err)
}
if code != resp.StatusCode {
t.Errorf("unexpected status code %d want %d", resp.StatusCode, code)
t.Fatalf("unexpected status code %d want %d", resp.StatusCode, code)
}
defer func() {
if err := resp.Body.Close(); err != nil {
t.Errorf("err closing body %s", err)
t.Fatalf("err closing body %s", err)
}
}()
if to != nil {
if err = json.NewDecoder(resp.Body).Decode(to); err != nil {
t.Errorf("unexpected err %s", err)
t.Fatalf("unexpected err %s", err)
}
}
}
@ -92,13 +92,13 @@ func TestHandler(t *testing.T) {
lr := listAlertsResponse{}
getResp(t, ts.URL+"/api/v1/alerts", &lr, 200)
if length := len(lr.Data.Alerts); length != 1 {
t.Errorf("expected 1 alert got %d", length)
t.Fatalf("expected 1 alert got %d", length)
}
lr = listAlertsResponse{}
getResp(t, ts.URL+"/vmalert/api/v1/alerts", &lr, 200)
if length := len(lr.Data.Alerts); length != 1 {
t.Errorf("expected 1 alert got %d", length)
t.Fatalf("expected 1 alert got %d", length)
}
})
t.Run("/api/v1/alert?alertID&groupID", func(t *testing.T) {
@ -106,13 +106,13 @@ func TestHandler(t *testing.T) {
alert := &apiAlert{}
getResp(t, ts.URL+"/"+expAlert.APILink(), alert, 200)
if !reflect.DeepEqual(alert, expAlert) {
t.Errorf("expected %v is equal to %v", alert, expAlert)
t.Fatalf("expected %v is equal to %v", alert, expAlert)
}
alert = &apiAlert{}
getResp(t, ts.URL+"/vmalert/"+expAlert.APILink(), alert, 200)
if !reflect.DeepEqual(alert, expAlert) {
t.Errorf("expected %v is equal to %v", alert, expAlert)
t.Fatalf("expected %v is equal to %v", alert, expAlert)
}
})
@ -135,13 +135,13 @@ func TestHandler(t *testing.T) {
lr := listGroupsResponse{}
getResp(t, ts.URL+"/api/v1/rules", &lr, 200)
if length := len(lr.Data.Groups); length != 1 {
t.Errorf("expected 1 group got %d", length)
t.Fatalf("expected 1 group got %d", length)
}
lr = listGroupsResponse{}
getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200)
if length := len(lr.Data.Groups); length != 1 {
t.Errorf("expected 1 group got %d", length)
t.Fatalf("expected 1 group got %d", length)
}
})
t.Run("/api/v1/rule?ruleID&groupID", func(t *testing.T) {
@ -150,14 +150,14 @@ func TestHandler(t *testing.T) {
getResp(t, ts.URL+"/"+expRule.APILink(), &gotRule, 200)
if expRule.ID != gotRule.ID {
t.Errorf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID)
t.Fatalf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID)
}
gotRule = apiRule{}
getResp(t, ts.URL+"/vmalert/"+expRule.APILink(), &gotRule, 200)
if expRule.ID != gotRule.ID {
t.Errorf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID)
t.Fatalf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID)
}
gotRuleWithUpdates := apiRuleWithUpdates{}
@ -173,7 +173,7 @@ func TestHandler(t *testing.T) {
lr := listGroupsResponse{}
getResp(t, ts.URL+url, &lr, 200)
if length := len(lr.Data.Groups); length != expGroups {
t.Errorf("expected %d groups got %d", expGroups, length)
t.Fatalf("expected %d groups got %d", expGroups, length)
}
if len(lr.Data.Groups) < 1 {
return
@ -183,7 +183,7 @@ func TestHandler(t *testing.T) {
rulesN += len(gr.Rules)
}
if rulesN != expRules {
t.Errorf("expected %d rules got %d", expRules, rulesN)
t.Fatalf("expected %d rules got %d", expRules, rulesN)
}
}
@ -248,16 +248,16 @@ func TestEmptyResponse(t *testing.T) {
t.Fatalf("unexpected err %s", err)
}
if code != resp.StatusCode {
t.Errorf("unexpected status code %d want %d", resp.StatusCode, code)
t.Fatalf("unexpected status code %d want %d", resp.StatusCode, code)
}
defer func() {
if err := resp.Body.Close(); err != nil {
t.Errorf("err closing body %s", err)
t.Fatalf("err closing body %s", err)
}
}()
if to != nil {
if err = json.NewDecoder(resp.Body).Decode(to); err != nil {
t.Errorf("unexpected err %s", err)
t.Fatalf("unexpected err %s", err)
}
}
}
@ -266,13 +266,13 @@ func TestEmptyResponse(t *testing.T) {
lr := listAlertsResponse{}
getResp(t, ts.URL+"/api/v1/alerts", &lr, 200)
if lr.Data.Alerts == nil {
t.Errorf("expected /api/v1/alerts response to have non-nil data")
t.Fatalf("expected /api/v1/alerts response to have non-nil data")
}
lr = listAlertsResponse{}
getResp(t, ts.URL+"/vmalert/api/v1/alerts", &lr, 200)
if lr.Data.Alerts == nil {
t.Errorf("expected /api/v1/alerts response to have non-nil data")
t.Fatalf("expected /api/v1/alerts response to have non-nil data")
}
})
@ -280,13 +280,13 @@ func TestEmptyResponse(t *testing.T) {
lr := listGroupsResponse{}
getResp(t, ts.URL+"/api/v1/rules", &lr, 200)
if lr.Data.Groups == nil {
t.Errorf("expected /api/v1/rules response to have non-nil data")
t.Fatalf("expected /api/v1/rules response to have non-nil data")
}
lr = listGroupsResponse{}
getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200)
if lr.Data.Groups == nil {
t.Errorf("expected /api/v1/rules response to have non-nil data")
t.Fatalf("expected /api/v1/rules response to have non-nil data")
}
})


@ -13,11 +13,11 @@ func TestUrlValuesToStrings(t *testing.T) {
res := urlValuesToStrings(mapQueryParams)
if len(res) != len(expectedRes) {
t.Errorf("Expected length %d, but got %d", len(expectedRes), len(res))
t.Fatalf("Expected length %d, but got %d", len(expectedRes), len(res))
}
for ind, val := range expectedRes {
if val != res[ind] {
t.Errorf("Expected %v; but got %v", val, res[ind])
t.Fatalf("Expected %v; but got %v", val, res[ind])
}
}
}


@ -23,7 +23,7 @@ const (
)
// This test simulates close process if user abort it
func Test_prometheusProcessor_run(t *testing.T) {
func TestPrometheusProcessorRun(t *testing.T) {
t.Skip()
defer func() { isSilent = false }()


@ -255,8 +255,7 @@ func (rws *RemoteWriteServer) importNativeHandler(t *testing.T) http.Handler {
if !reflect.DeepEqual(gotTimeSeries, rws.expectedSeries) {
w.WriteHeader(http.StatusInternalServerError)
t.Errorf("datasets not equal, expected: %#v;\n got: %#v", rws.expectedSeries, gotTimeSeries)
return
t.Fatalf("datasets not equal, expected: %#v;\n got: %#v", rws.expectedSeries, gotTimeSeries)
}
w.WriteHeader(http.StatusNoContent)


@ -5,20 +5,22 @@ import (
"testing"
)
func Test_cleanDirectory(t *testing.T) {
func TestCleanDirectory(t *testing.T) {
f := func(dir, exp string) {
t.Helper()
got := cleanDirectory(dir)
if got != exp {
t.Errorf("expected dir %q, got %q", exp, got)
t.Fatalf("expected dir %q, got %q", exp, got)
}
}
f("/foo/", "foo/")
f("//foo/", "foo/")
f("foo", "foo/")
}
func Test_FSInit(t *testing.T) {
func TestFSInit(t *testing.T) {
f := func(expErr string, params ...string) {
t.Helper()