package main

import (
	"context"
	"errors"
	"reflect"
	"strings"
	"testing"
	"time"

	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/datasource"
	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/notifier"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
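
// TestAlertingRule_ToTimeSeries verifies that alerts in different states are
// converted into the expected time series (alertMetricName and
// alertForStateMetricName) by toTimeSeries.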
func TestAlertingRule_ToTimeSeries(t *testing.T) {
	timestamp := time.Now()
	testCases := []struct {
		rule  *AlertingRule
		alert *notifier.Alert
		expTS []prompbmarshal.TimeSeries
	}{
		{
			newTestAlertingRule("instant", 0),
			&notifier.Alert{State: notifier.StateFiring},
			[]prompbmarshal.TimeSeries{
				newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
					"__name__":      alertMetricName,
					alertStateLabel: notifier.StateFiring.String(),
				}),
			},
		},
		{
			newTestAlertingRule("instant extra labels", 0),
			&notifier.Alert{State: notifier.StateFiring, Labels: map[string]string{
				"job":      "foo",
				"instance": "bar",
			}},
			[]prompbmarshal.TimeSeries{
				newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
					"__name__":      alertMetricName,
					alertStateLabel: notifier.StateFiring.String(),
					"job":           "foo",
					"instance":      "bar",
				}),
			},
		},
		{
			newTestAlertingRule("instant labels override", 0),
			&notifier.Alert{State: notifier.StateFiring, Labels: map[string]string{
				alertStateLabel: "foo",
				"__name__":      "bar",
			}},
			[]prompbmarshal.TimeSeries{
				newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
					"__name__":      alertMetricName,
					alertStateLabel: notifier.StateFiring.String(),
				}),
			},
		},
		{
			newTestAlertingRule("for", time.Second),
			&notifier.Alert{State: notifier.StateFiring, Start: timestamp.Add(time.Second)},
			[]prompbmarshal.TimeSeries{
				newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
					"__name__":      alertMetricName,
					alertStateLabel: notifier.StateFiring.String(),
				}),
				newTimeSeries([]float64{float64(timestamp.Add(time.Second).Unix())},
					[]int64{timestamp.UnixNano()},
					map[string]string{
						"__name__": alertForStateMetricName,
					}),
			},
		},
		{
			newTestAlertingRule("for pending", 10*time.Second),
			&notifier.Alert{State: notifier.StatePending, Start: timestamp.Add(time.Second)},
			[]prompbmarshal.TimeSeries{
				newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
					"__name__":      alertMetricName,
					alertStateLabel: notifier.StatePending.String(),
				}),
				newTimeSeries([]float64{float64(timestamp.Add(time.Second).Unix())},
					[]int64{timestamp.UnixNano()},
					map[string]string{
						"__name__": alertForStateMetricName,
					}),
			},
		},
	}
	for _, tc := range testCases {
		t.Run(tc.rule.Name, func(t *testing.T) {
			tc.rule.alerts[tc.alert.ID] = tc.alert
			tss := tc.rule.toTimeSeries(timestamp.Unix())
			if err := compareTimeSeries(t, tc.expTS, tss); err != nil {
				t.Fatalf("timeseries mismatch: %s", err)
			}
		})
	}
}
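
// TestAlertingRule_Exec feeds an AlertingRule with a sequence of evaluation
// steps and checks the resulting alert states (pending, firing, inactive)
// after all steps are applied.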
func TestAlertingRule_Exec(t *testing.T) {
	const defaultStep = 5 * time.Millisecond
	type testAlert struct {
		labels []string
		alert  *notifier.Alert
	}
	testCases := []struct {
		rule      *AlertingRule
		steps     [][]datasource.Metric
		expAlerts []testAlert
	}{
		{
			newTestAlertingRule("empty", 0),
			[][]datasource.Metric{},
			nil,
		},
		{
			newTestAlertingRule("empty labels", 0),
			[][]datasource.Metric{
				{datasource.Metric{Values: []float64{1}, Timestamps: []int64{1}}},
			},
			[]testAlert{
				{alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
		{
			newTestAlertingRule("single-firing", 0),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
		{
			newTestAlertingRule("single-firing=>inactive", 0),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateInactive}},
			},
		},
		{
			newTestAlertingRule("single-firing=>inactive=>firing", 0),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{},
				{metricWithLabels(t, "name", "foo")},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
		{
			newTestAlertingRule("single-firing=>inactive=>firing=>inactive", 0),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{},
				{metricWithLabels(t, "name", "foo")},
				{},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateInactive}},
			},
		},
		{
			newTestAlertingRule("single-firing=>inactive=>firing=>inactive=>empty", 0),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{},
				{metricWithLabels(t, "name", "foo")},
				{},
				{},
			},
			nil,
		},
		{
			newTestAlertingRule("single-firing=>inactive=>firing=>inactive=>empty=>firing", 0),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{},
				{metricWithLabels(t, "name", "foo")},
				{},
				{},
				{metricWithLabels(t, "name", "foo")},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
		{
			newTestAlertingRule("multiple-firing", 0),
			[][]datasource.Metric{
				{
					metricWithLabels(t, "name", "foo"),
					metricWithLabels(t, "name", "foo1"),
					metricWithLabels(t, "name", "foo2"),
				},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
				{labels: []string{"name", "foo1"}, alert: &notifier.Alert{State: notifier.StateFiring}},
				{labels: []string{"name", "foo2"}, alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
		{
			newTestAlertingRule("multiple-steps-firing", 0),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{metricWithLabels(t, "name", "foo1")},
				{metricWithLabels(t, "name", "foo2")},
			},
			// 1: fire first alert
			// 2: fire second alert, set first inactive
			// 3: fire third alert, set second inactive, delete first one
			[]testAlert{
				{labels: []string{"name", "foo1"}, alert: &notifier.Alert{State: notifier.StateInactive}},
				{labels: []string{"name", "foo2"}, alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
		{
			newTestAlertingRule("for-pending", time.Minute),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StatePending}},
			},
		},
		{
			newTestAlertingRule("for-fired", defaultStep),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{metricWithLabels(t, "name", "foo")},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
		{
			newTestAlertingRule("for-pending=>empty", time.Second),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{metricWithLabels(t, "name", "foo")},
				// empty step to reset and delete pending alerts
				{},
			},
			nil,
		},
		{
			newTestAlertingRule("for-pending=>firing=>inactive", defaultStep),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{metricWithLabels(t, "name", "foo")},
				// empty step to reset pending alerts
				{},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateInactive}},
			},
		},
		{
			newTestAlertingRule("for-pending=>firing=>inactive=>pending", defaultStep),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{metricWithLabels(t, "name", "foo")},
				// empty step to reset pending alerts
				{},
				{metricWithLabels(t, "name", "foo")},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StatePending}},
			},
		},
		{
			newTestAlertingRule("for-pending=>firing=>inactive=>pending=>firing", defaultStep),
			[][]datasource.Metric{
				{metricWithLabels(t, "name", "foo")},
				{metricWithLabels(t, "name", "foo")},
				// empty step to reset pending alerts
				{},
				{metricWithLabels(t, "name", "foo")},
				{metricWithLabels(t, "name", "foo")},
			},
			[]testAlert{
				{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
			},
		},
	}
	fakeGroup := Group{Name: "TestRule_Exec"}
	for _, tc := range testCases {
		t.Run(tc.rule.Name, func(t *testing.T) {
			fq := &fakeQuerier{}
			tc.rule.q = fq
			tc.rule.GroupID = fakeGroup.ID()
			for _, step := range tc.steps {
				fq.reset()
				fq.add(step...)
				if _, err := tc.rule.Exec(context.TODO()); err != nil {
					t.Fatalf("unexpected err: %s", err)
				}
				// artificial delay between applying steps
				time.Sleep(defaultStep)
			}
			if len(tc.rule.alerts) != len(tc.expAlerts) {
				t.Fatalf("expected %d alerts; got %d", len(tc.expAlerts), len(tc.rule.alerts))
			}
			expAlerts := make(map[uint64]*notifier.Alert)
			for _, ta := range tc.expAlerts {
				labels := ta.labels
				labels = append(labels, alertNameLabel)
				labels = append(labels, tc.rule.Name)
				h := hash(metricWithLabels(t, labels...))
				expAlerts[h] = ta.alert
			}
			for key, exp := range expAlerts {
				got, ok := tc.rule.alerts[key]
				if !ok {
					t.Fatalf("expected to have key %d", key)
				}
				if got.State != exp.State {
					t.Fatalf("expected state %d; got %d", exp.State, got.State)
				}
			}
		})
	}
}
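
// TestAlertingRule_ExecRange evaluates rules over a range of data points and
// compares the produced time series against the alerts expected for each
// point, including transitions between pending and firing states.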
func TestAlertingRule_ExecRange(t *testing.T) {
	testCases := []struct {
		rule      *AlertingRule
		data      []datasource.Metric
		expAlerts []*notifier.Alert
	}{
		{
			newTestAlertingRule("empty", 0),
			[]datasource.Metric{},
			nil,
		},
		{
			newTestAlertingRule("empty labels", 0),
			[]datasource.Metric{
				{Values: []float64{1}, Timestamps: []int64{1}},
			},
			[]*notifier.Alert{
				{State: notifier.StateFiring},
			},
		},
		{
			newTestAlertingRule("single-firing", 0),
			[]datasource.Metric{
				metricWithLabels(t, "name", "foo"),
			},
			[]*notifier.Alert{
				{
					Labels: map[string]string{"name": "foo"},
					State:  notifier.StateFiring,
				},
			},
		},
		{
			newTestAlertingRule("single-firing-on-range", 0),
			[]datasource.Metric{
				{Values: []float64{1, 1, 1}, Timestamps: []int64{1e3, 2e3, 3e3}},
			},
			[]*notifier.Alert{
				{State: notifier.StateFiring},
				{State: notifier.StateFiring},
				{State: notifier.StateFiring},
			},
		},
		{
			newTestAlertingRule("for-pending", time.Second),
			[]datasource.Metric{
				{Values: []float64{1, 1, 1}, Timestamps: []int64{1, 3, 5}},
			},
			[]*notifier.Alert{
				{State: notifier.StatePending, Start: time.Unix(1, 0)},
				{State: notifier.StatePending, Start: time.Unix(3, 0)},
				{State: notifier.StatePending, Start: time.Unix(5, 0)},
			},
		},
		{
			newTestAlertingRule("for-firing", 3*time.Second),
			[]datasource.Metric{
				{Values: []float64{1, 1, 1}, Timestamps: []int64{1, 3, 5}},
			},
			[]*notifier.Alert{
				{State: notifier.StatePending, Start: time.Unix(1, 0)},
				{State: notifier.StatePending, Start: time.Unix(1, 0)},
				{State: notifier.StateFiring, Start: time.Unix(1, 0)},
			},
		},
		{
			newTestAlertingRule("for=>pending=>firing=>pending=>firing=>pending", time.Second),
			[]datasource.Metric{
				{Values: []float64{1, 1, 1, 1, 1}, Timestamps: []int64{1, 2, 5, 6, 20}},
			},
			[]*notifier.Alert{
				{State: notifier.StatePending, Start: time.Unix(1, 0)},
				{State: notifier.StateFiring, Start: time.Unix(1, 0)},
				{State: notifier.StatePending, Start: time.Unix(5, 0)},
				{State: notifier.StateFiring, Start: time.Unix(5, 0)},
				{State: notifier.StatePending, Start: time.Unix(20, 0)},
			},
		},
		{
			newTestAlertingRule("multi-series-for=>pending=>pending=>firing", 3*time.Second),
			[]datasource.Metric{
				{Values: []float64{1, 1, 1}, Timestamps: []int64{1, 3, 5}},
				{Values: []float64{1, 1}, Timestamps: []int64{1, 5},
					Labels: []datasource.Label{{Name: "foo", Value: "bar"}},
				},
			},
			[]*notifier.Alert{
				{State: notifier.StatePending, Start: time.Unix(1, 0)},
				{State: notifier.StatePending, Start: time.Unix(1, 0)},
				{State: notifier.StateFiring, Start: time.Unix(1, 0)},
				// alerts for the second series with labels {foo="bar"}
				{State: notifier.StatePending, Start: time.Unix(1, 0),
					Labels: map[string]string{
						"foo": "bar",
					}},
				{State: notifier.StatePending, Start: time.Unix(5, 0),
					Labels: map[string]string{
						"foo": "bar",
					}},
			},
		},
		{
			newTestRuleWithLabels("multi-series-firing", "source", "vm"),
			[]datasource.Metric{
				{Values: []float64{1, 1}, Timestamps: []int64{1, 100}},
				{Values: []float64{1, 1}, Timestamps: []int64{1, 5},
					Labels: []datasource.Label{{Name: "foo", Value: "bar"}},
				},
			},
			[]*notifier.Alert{
				{State: notifier.StateFiring, Labels: map[string]string{
					"source": "vm",
				}},
				{State: notifier.StateFiring, Labels: map[string]string{
					"source": "vm",
				}},
				// alerts for the second series with labels {foo="bar"}
				{State: notifier.StateFiring, Labels: map[string]string{
					"foo":    "bar",
					"source": "vm",
				}},
				{State: notifier.StateFiring, Labels: map[string]string{
					"foo":    "bar",
					"source": "vm",
				}},
			},
		},
	}
	fakeGroup := Group{Name: "TestRule_ExecRange"}
	for _, tc := range testCases {
		t.Run(tc.rule.Name, func(t *testing.T) {
			fq := &fakeQuerier{}
			tc.rule.q = fq
			tc.rule.GroupID = fakeGroup.ID()
			fq.add(tc.data...)
			gotTS, err := tc.rule.ExecRange(context.TODO(), time.Now(), time.Now())
			if err != nil {
				t.Fatalf("unexpected err: %s", err)
			}
			var expTS []prompbmarshal.TimeSeries
			var j int
			for _, series := range tc.data {
				for _, timestamp := range series.Timestamps {
					a := tc.expAlerts[j]
					if a.Labels == nil {
						a.Labels = make(map[string]string)
					}
					a.Labels[alertNameLabel] = tc.rule.Name
					expTS = append(expTS, tc.rule.alertToTimeSeries(tc.expAlerts[j], timestamp)...)
					j++
				}
			}
			if len(gotTS) != len(expTS) {
				t.Fatalf("expected %d time series; got %d", len(expTS), len(gotTS))
			}
			for i := range expTS {
				got, exp := gotTS[i], expTS[i]
				if !reflect.DeepEqual(got, exp) {
					t.Fatalf("%d: expected \n%v but got \n%v", i, exp, got)
				}
			}
		})
	}
}
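
// TestAlertingRule_Restore checks that alert state is restored from the
// alertForStateMetricName series returned by the querier, with the alert
// Start time taken from the metric value.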
func TestAlertingRule_Restore(t *testing.T) {
	testCases := []struct {
		rule      *AlertingRule
		metrics   []datasource.Metric
		expAlerts map[uint64]*notifier.Alert
	}{
		{
			newTestRuleWithLabels("no extra labels"),
			[]datasource.Metric{
				metricWithValueAndLabels(t, float64(time.Now().Truncate(time.Hour).Unix()),
					"__name__", alertForStateMetricName,
				),
			},
			map[uint64]*notifier.Alert{
				hash(datasource.Metric{}): {State: notifier.StatePending,
					Start: time.Now().Truncate(time.Hour)},
			},
		},
		{
			newTestRuleWithLabels("metric labels"),
			[]datasource.Metric{
				metricWithValueAndLabels(t, float64(time.Now().Truncate(time.Hour).Unix()),
					"__name__", alertForStateMetricName,
					alertNameLabel, "metric labels",
					alertGroupNameLabel, "groupID",
					"foo", "bar",
					"namespace", "baz",
				),
			},
			map[uint64]*notifier.Alert{
				hash(metricWithLabels(t,
					alertNameLabel, "metric labels",
					alertGroupNameLabel, "groupID",
					"foo", "bar",
					"namespace", "baz",
				)): {State: notifier.StatePending,
					Start: time.Now().Truncate(time.Hour)},
			},
		},
		{
			newTestRuleWithLabels("rule labels", "source", "vm"),
			[]datasource.Metric{
				metricWithValueAndLabels(t, float64(time.Now().Truncate(time.Hour).Unix()),
					"__name__", alertForStateMetricName,
					"foo", "bar",
					"namespace", "baz",
					// extra labels set by rule
					"source", "vm",
				),
			},
			map[uint64]*notifier.Alert{
				hash(metricWithLabels(t,
					"foo", "bar",
					"namespace", "baz",
					"source", "vm",
				)): {State: notifier.StatePending,
					Start: time.Now().Truncate(time.Hour)},
			},
		},
		{
			newTestRuleWithLabels("multiple alerts"),
			[]datasource.Metric{
				metricWithValueAndLabels(t, float64(time.Now().Truncate(time.Hour).Unix()),
					"__name__", alertForStateMetricName,
					"host", "localhost-1",
				),
				metricWithValueAndLabels(t, float64(time.Now().Truncate(2*time.Hour).Unix()),
					"__name__", alertForStateMetricName,
					"host", "localhost-2",
				),
				metricWithValueAndLabels(t, float64(time.Now().Truncate(3*time.Hour).Unix()),
					"__name__", alertForStateMetricName,
					"host", "localhost-3",
				),
			},
			map[uint64]*notifier.Alert{
				hash(metricWithLabels(t, "host", "localhost-1")): {State: notifier.StatePending,
					Start: time.Now().Truncate(time.Hour)},
				hash(metricWithLabels(t, "host", "localhost-2")): {State: notifier.StatePending,
					Start: time.Now().Truncate(2 * time.Hour)},
				hash(metricWithLabels(t, "host", "localhost-3")): {State: notifier.StatePending,
					Start: time.Now().Truncate(3 * time.Hour)},
			},
		},
	}
	fakeGroup := Group{Name: "TestRule_Exec"}
	for _, tc := range testCases {
		t.Run(tc.rule.Name, func(t *testing.T) {
			fq := &fakeQuerier{}
			tc.rule.GroupID = fakeGroup.ID()
			tc.rule.q = fq
			fq.add(tc.metrics...)
			if err := tc.rule.Restore(context.TODO(), fq, time.Hour, nil); err != nil {
				t.Fatalf("unexpected err: %s", err)
			}
			if len(tc.rule.alerts) != len(tc.expAlerts) {
				t.Fatalf("expected %d alerts; got %d", len(tc.expAlerts), len(tc.rule.alerts))
			}
			for key, exp := range tc.expAlerts {
				got, ok := tc.rule.alerts[key]
				if !ok {
					t.Fatalf("expected to have key %d", key)
				}
				if got.State != exp.State {
					t.Fatalf("expected state %d; got %d", exp.State, got.State)
				}
				if got.Start != exp.Start {
					t.Fatalf("expected Start %v; got %v", exp.Start, got.Start)
				}
			}
		})
	}
}
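
// TestAlertingRule_Exec_Negative covers error paths of Exec: duplicate time
// series produced by colliding labels and errors returned by the datasource.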
func TestAlertingRule_Exec_Negative(t *testing.T) {
	fq := &fakeQuerier{}
	ar := newTestAlertingRule("test", 0)
	ar.Labels = map[string]string{"job": "test"}
	ar.q = fq

	// successful attempt
	fq.add(metricWithValueAndLabels(t, 1, "__name__", "foo", "job", "bar"))
	_, err := ar.Exec(context.TODO())
	if err != nil {
		t.Fatal(err)
	}

	// label `job` will collide with the rule's extra label and make both time series equal
	fq.add(metricWithValueAndLabels(t, 1, "__name__", "foo", "job", "baz"))
	_, err = ar.Exec(context.TODO())
	if !errors.Is(err, errDuplicate) {
		t.Fatalf("expected to have %s error; got %s", errDuplicate, err)
	}

	fq.reset()

	expErr := "connection reset by peer"
	fq.setErr(errors.New(expErr))
	_, err = ar.Exec(context.TODO())
	if err == nil {
		t.Fatalf("expected to get err; got nil")
	}
	if !strings.Contains(err.Error(), expErr) {
		t.Fatalf("expected to get err %q; got %q instead", expErr, err)
	}
}
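
// TestAlertingRule_Template verifies templating of alert labels and
// annotations: rule-level labels are merged with metric labels, and
// {{ $labels }} / {{ $value }} placeholders are expanded per alert.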
func TestAlertingRule_Template(t *testing.T) {
	testCases := []struct {
		rule      *AlertingRule
		metrics   []datasource.Metric
		expAlerts map[uint64]*notifier.Alert
	}{
		{
			newTestRuleWithLabels("common", "region", "east"),
			[]datasource.Metric{
				metricWithValueAndLabels(t, 1, "instance", "foo"),
				metricWithValueAndLabels(t, 1, "instance", "bar"),
			},
			map[uint64]*notifier.Alert{
				hash(metricWithLabels(t, alertNameLabel, "common", "region", "east", "instance", "foo")): {
					Annotations: map[string]string{},
					Labels: map[string]string{
						alertNameLabel: "common",
						"region":       "east",
						"instance":     "foo",
					},
				},
				hash(metricWithLabels(t, alertNameLabel, "common", "region", "east", "instance", "bar")): {
					Annotations: map[string]string{},
					Labels: map[string]string{
						alertNameLabel: "common",
						"region":       "east",
						"instance":     "bar",
					},
				},
			},
		},
		{
			&AlertingRule{
				Name: "override label",
				Labels: map[string]string{
					"instance": "{{ $labels.instance }}",
					"region":   "east",
				},
				Annotations: map[string]string{
					"summary":     `Too high connection number for "{{ $labels.instance }}" for region {{ $labels.region }}`,
					"description": `It is {{ $value }} connections for "{{ $labels.instance }}"`,
				},
				alerts: make(map[uint64]*notifier.Alert),
			},
			[]datasource.Metric{
				metricWithValueAndLabels(t, 2, "instance", "foo"),
				metricWithValueAndLabels(t, 10, "instance", "bar"),
			},
			map[uint64]*notifier.Alert{
				hash(metricWithLabels(t, alertNameLabel, "override label", "region", "east", "instance", "foo")): {
					Labels: map[string]string{
						alertNameLabel: "override label",
						"instance":     "foo",
						"region":       "east",
					},
					Annotations: map[string]string{
						"summary":     `Too high connection number for "foo" for region east`,
						"description": `It is 2 connections for "foo"`,
					},
				},
				hash(metricWithLabels(t, alertNameLabel, "override label", "region", "east", "instance", "bar")): {
					Labels: map[string]string{
						alertNameLabel: "override label",
						"instance":     "bar",
						"region":       "east",
					},
					Annotations: map[string]string{
						"summary":     `Too high connection number for "bar" for region east`,
						"description": `It is 10 connections for "bar"`,
					},
				},
			},
		},
	}
	fakeGroup := Group{Name: "TestRule_Exec"}
	for _, tc := range testCases {
		t.Run(tc.rule.Name, func(t *testing.T) {
			fq := &fakeQuerier{}
			tc.rule.GroupID = fakeGroup.ID()
			tc.rule.q = fq
			fq.add(tc.metrics...)
			if _, err := tc.rule.Exec(context.TODO()); err != nil {
				t.Fatalf("unexpected err: %s", err)
			}
			for hash, expAlert := range tc.expAlerts {
				gotAlert := tc.rule.alerts[hash]
				if gotAlert == nil {
					t.Fatalf("alert %d is missing; labels: %v; annotations: %v",
						hash, expAlert.Labels, expAlert.Annotations)
				}
				if !reflect.DeepEqual(expAlert.Annotations, gotAlert.Annotations) {
					t.Fatalf("expected to have annotations %#v; got %#v", expAlert.Annotations, gotAlert.Annotations)
				}
				if !reflect.DeepEqual(expAlert.Labels, gotAlert.Labels) {
					t.Fatalf("expected to have labels %#v; got %#v", expAlert.Labels, gotAlert.Labels)
				}
			}
		})
	}
}
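
// newTestRuleWithLabels creates a test rule with the given name and extra
// labels passed as key/value pairs.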
func newTestRuleWithLabels(name string, labels ...string) *AlertingRule {
	r := newTestAlertingRule(name, 0)
	r.Labels = make(map[string]string)
	for i := 0; i < len(labels); i += 2 {
		r.Labels[labels[i]] = labels[i+1]
	}
	return r
}
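
// newTestAlertingRule creates a minimal AlertingRule for tests; waitFor is
// used as both the `for` duration and the evaluation interval.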
func newTestAlertingRule(name string, waitFor time.Duration) *AlertingRule {
	return &AlertingRule{Name: name, alerts: make(map[uint64]*notifier.Alert), For: waitFor, EvalInterval: waitFor}
}