vmalert: correctly calculate alert ID including extra labels (#1734)

Previously, the ID for an alert entity was generated without the alertname or
groupname labels. This led to collisions when multiple alerting rules within the
same group produced the same labelsets. E.g. expr: `sum(metric1) by (job) > 0` and
expr: `sum(metric2) by (job) > 0` could result in the same labelset `job: "job"`.
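
For illustration, the collision can be reproduced with a minimal sketch. `hashLabels`
below is a simplified stand-in for vmalert's `hash()` function, not the actual
implementation, and the rule names are illustrative:

```go
package main

import (
	"fmt"
	"hash/fnv"
	"sort"
)

// hashLabels hashes a labelset in a deterministic (sorted) order.
// Simplified stand-in for vmalert's hash(); for illustration only.
func hashLabels(labels map[string]string) uint64 {
	keys := make([]string, 0, len(labels))
	for k := range labels {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	h := fnv.New64a()
	for _, k := range keys {
		h.Write([]byte(k + "=" + labels[k] + ";"))
	}
	return h.Sum64()
}

func main() {
	// Both rules return only the "job" label, so hashing the response labels
	// alone produces the same alert ID.
	rule1 := map[string]string{"job": "job"} // e.g. sum(metric1) by (job) > 0
	rule2 := map[string]string{"job": "job"} // e.g. sum(metric2) by (job) > 0
	fmt.Println(hashLabels(rule1) == hashLabels(rule2)) // true: collision

	// Including the rule name in the labelset keeps the IDs distinct.
	rule1["alertname"] = "Metric1High" // illustrative rule names
	rule2["alertname"] = "Metric2High"
	fmt.Println(hashLabels(rule1) == hashLabels(rule2)) // false
}
```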

The issue affects only the UI and Web API parts of vmalert, because the alert ID
is used only for displaying and finding active alerts. It does not affect the
state restore procedure, since these labels were added right before pushing to
remote storage.

The change adds all extra labels right after receiving the response from the
datasource, and no longer adds extra labels right before pushing to remote storage.
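
A minimal sketch of the new ordering follows; `Metric` and `SetLabel` are
simplified stand-ins for vmalert's datasource types, and the label values are
illustrative:

```go
package main

import "fmt"

// Metric is a simplified stand-in for vmalert's datasource metric type.
type Metric struct{ Labels map[string]string }

func (m *Metric) SetLabel(k, v string) { m.Labels[k] = v }

func main() {
	// labelset as returned by the datasource
	m := Metric{Labels: map[string]string{"job": "job"}}

	// new behaviour: attach the identifying labels right away,
	// before the alert ID is computed
	m.SetLabel("alertname", "Metric1High") // rule name; illustrative value
	m.SetLabel("alertgroup", "group-1")    // group name; illustrative value

	// the same labelset now feeds both the alert ID hash and the
	// time series pushed to remote storage
	fmt.Println(m.Labels)
}
```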

Additionally, the change introduces a new flag `Restored`, which is displayed in
the UI for alerts that have been restored from remote storage on restart.
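
For reference, a hedged illustration of how the new field surfaces in the JSON API.
The struct below mirrors only the two relevant fields of vmalert's APIAlert (see the
APIAlert hunk at the end of this diff); the real struct has more fields:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// APIAlert here keeps only the fields relevant to this change.
type APIAlert struct {
	ActiveAt time.Time `json:"activeAt"`
	Restored bool      `json:"restored"`
}

func main() {
	a := APIAlert{
		ActiveAt: time.Date(2021, 10, 22, 9, 30, 0, 0, time.UTC),
		Restored: true, // set for alerts restored from remote storage on restart
	}
	b, _ := json.Marshal(a)
	fmt.Println(string(b)) // {"activeAt":"2021-10-22T09:30:00Z","restored":true}
}
```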
Roman Khavronenko 2021-10-22 12:30:38 +03:00 committed by Aliaksandr Valialkin
parent ad40e70d39
commit e0f21d6000
8 changed files with 336 additions and 204 deletions


@ -163,7 +163,13 @@ func (ar *AlertingRule) ExecRange(ctx context.Context, start, end time.Time) ([]
// so the hash key will be consistent on restore
s.SetLabel(k, v)
}
// set additional labels to identify group and rule name
if ar.Name != "" {
s.SetLabel(alertNameLabel, ar.Name)
}
if !*disableAlertGroupLabel && ar.GroupName != "" {
s.SetLabel(alertGroupNameLabel, ar.GroupName)
}
a, err := ar.newAlert(s, time.Time{}, qFn) // initial alert
if err != nil {
return nil, fmt.Errorf("failed to create alert: %s", err)
@ -178,13 +184,11 @@ func (ar *AlertingRule) ExecRange(ctx context.Context, start, end time.Time) ([]
// if alert with For > 0
prevT := time.Time{}
//activeAt := time.Time{}
for i := range s.Values {
at := time.Unix(s.Timestamps[i], 0)
if at.Sub(prevT) > ar.EvalInterval {
// reset to Pending if there are gaps > EvalInterval between DPs
a.State = notifier.StatePending
//activeAt = at
a.Start = at
} else if at.Sub(a.Start) >= ar.For {
a.State = notifier.StateFiring
@ -231,6 +235,14 @@ func (ar *AlertingRule) Exec(ctx context.Context) ([]prompbmarshal.TimeSeries, e
// so the hash key will be consistent on restore
m.SetLabel(k, v)
}
// set additional labels to identify group and rule name
// set additional labels to identify group and rule name
if ar.Name != "" {
m.SetLabel(alertNameLabel, ar.Name)
}
if !*disableAlertGroupLabel && ar.GroupName != "" {
m.SetLabel(alertGroupNameLabel, ar.GroupName)
}
h := hash(m)
if _, ok := updated[h]; ok {
// duplicate may be caused by extra labels
@ -352,11 +364,6 @@ func (ar *AlertingRule) newAlert(m datasource.Metric, start time.Time, qFn notif
Start: start,
Expr: ar.Expr,
}
// label defined here to make override possible by
// time series labels.
if !*disableAlertGroupLabel && ar.GroupName != "" {
a.Labels[alertGroupNameLabel] = ar.GroupName
}
for _, l := range m.Labels {
// drop __name__ to be consistent with Prometheus alerting
if l.Name == "__name__" {
@ -427,6 +434,7 @@ func (ar *AlertingRule) newAlertAPI(a notifier.Alert) *APIAlert {
Annotations: a.Annotations,
State: a.State.String(),
ActiveAt: a.Start,
Restored: a.Restored,
Value: strconv.FormatFloat(a.Value, 'f', -1, 32),
}
if alertURLGeneratorFn != nil {
@ -447,43 +455,42 @@ const (
alertStateLabel = "alertstate"
// alertGroupNameLabel defines the label name attached for generated time series.
// attaching this label may be disabled via `-disableAlertgroupLabel` flag.
alertGroupNameLabel = "alertgroup"
)
// alertToTimeSeries converts the given alert with the given timestamp to timeseries
func (ar *AlertingRule) alertToTimeSeries(a *notifier.Alert, timestamp int64) []prompbmarshal.TimeSeries {
var tss []prompbmarshal.TimeSeries
tss = append(tss, alertToTimeSeries(ar.Name, a, timestamp))
tss = append(tss, alertToTimeSeries(a, timestamp))
if ar.For > 0 {
tss = append(tss, alertForToTimeSeries(ar.Name, a, timestamp))
tss = append(tss, alertForToTimeSeries(a, timestamp))
}
return tss
}
func alertToTimeSeries(name string, a *notifier.Alert, timestamp int64) prompbmarshal.TimeSeries {
func alertToTimeSeries(a *notifier.Alert, timestamp int64) prompbmarshal.TimeSeries {
labels := make(map[string]string)
for k, v := range a.Labels {
labels[k] = v
}
labels["__name__"] = alertMetricName
labels[alertNameLabel] = name
labels[alertStateLabel] = a.State.String()
return newTimeSeries([]float64{1}, []int64{timestamp}, labels)
}
// alertForToTimeSeries returns a timeseries that represents
// state of active alerts, where value is time when alert become active
func alertForToTimeSeries(name string, a *notifier.Alert, timestamp int64) prompbmarshal.TimeSeries {
func alertForToTimeSeries(a *notifier.Alert, timestamp int64) prompbmarshal.TimeSeries {
labels := make(map[string]string)
for k, v := range a.Labels {
labels[k] = v
}
labels["__name__"] = alertForStateMetricName
labels[alertNameLabel] = name
return newTimeSeries([]float64{float64(a.Start.Unix())}, []int64{timestamp}, labels)
}
// Restore restores the state of active alerts basing on previously written timeseries.
// Restore restores the state of active alerts basing on previously written time series.
// Restore restores only Start field. Field State will be always Pending and supposed
// to be updated on next Exec, as well as Value field.
// Only rules with For > 0 will be restored.
@ -511,23 +518,13 @@ func (ar *AlertingRule) Restore(ctx context.Context, q datasource.Querier, lookb
}
for _, m := range qMetrics {
labels := m.Labels
m.Labels = make([]datasource.Label, 0)
// drop all extra labels, so hash key will
// be identical to time series received in Exec
for _, l := range labels {
if l.Name == alertNameLabel || l.Name == alertGroupNameLabel {
continue
}
m.Labels = append(m.Labels, l)
}
a, err := ar.newAlert(m, time.Unix(int64(m.Values[0]), 0), qFn)
if err != nil {
return fmt.Errorf("failed to create alert: %w", err)
}
a.ID = hash(m)
a.State = notifier.StatePending
a.Restored = true
ar.alerts[a.ID] = a
logger.Infof("alert %q (%d) restored to state at %v", a.Name, a.ID, a.Start)
}


@ -27,7 +27,6 @@ func TestAlertingRule_ToTimeSeries(t *testing.T) {
newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": alertMetricName,
alertStateLabel: notifier.StateFiring.String(),
alertNameLabel: "instant",
}),
},
},
@ -41,7 +40,6 @@ func TestAlertingRule_ToTimeSeries(t *testing.T) {
newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": alertMetricName,
alertStateLabel: notifier.StateFiring.String(),
alertNameLabel: "instant extra labels",
"job": "foo",
"instance": "bar",
}),
@ -57,7 +55,6 @@ func TestAlertingRule_ToTimeSeries(t *testing.T) {
newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": alertMetricName,
alertStateLabel: notifier.StateFiring.String(),
alertNameLabel: "instant labels override",
}),
},
},
@ -68,13 +65,11 @@ func TestAlertingRule_ToTimeSeries(t *testing.T) {
newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": alertMetricName,
alertStateLabel: notifier.StateFiring.String(),
alertNameLabel: "for",
}),
newTimeSeries([]float64{float64(timestamp.Add(time.Second).Unix())},
[]int64{timestamp.UnixNano()},
map[string]string{
"__name__": alertForStateMetricName,
alertNameLabel: "for",
"__name__": alertForStateMetricName,
}),
},
},
@ -85,13 +80,11 @@ func TestAlertingRule_ToTimeSeries(t *testing.T) {
newTimeSeries([]float64{1}, []int64{timestamp.UnixNano()}, map[string]string{
"__name__": alertMetricName,
alertStateLabel: notifier.StatePending.String(),
alertNameLabel: "for pending",
}),
newTimeSeries([]float64{float64(timestamp.Add(time.Second).Unix())},
[]int64{timestamp.UnixNano()},
map[string]string{
"__name__": alertForStateMetricName,
alertNameLabel: "for pending",
"__name__": alertForStateMetricName,
}),
},
},
@ -109,23 +102,27 @@ func TestAlertingRule_ToTimeSeries(t *testing.T) {
func TestAlertingRule_Exec(t *testing.T) {
const defaultStep = 5 * time.Millisecond
type testAlert struct {
labels []string
alert *notifier.Alert
}
testCases := []struct {
rule *AlertingRule
steps [][]datasource.Metric
expAlerts map[uint64]*notifier.Alert
expAlerts []testAlert
}{
{
newTestAlertingRule("empty", 0),
[][]datasource.Metric{},
map[uint64]*notifier.Alert{},
nil,
},
{
newTestAlertingRule("empty labels", 0),
[][]datasource.Metric{
{datasource.Metric{Values: []float64{1}, Timestamps: []int64{1}}},
},
map[uint64]*notifier.Alert{
hash(datasource.Metric{}): {State: notifier.StateFiring},
[]testAlert{
{alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
{
@ -133,8 +130,8 @@ func TestAlertingRule_Exec(t *testing.T) {
[][]datasource.Metric{
{metricWithLabels(t, "name", "foo")},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateFiring},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
{
@ -143,8 +140,8 @@ func TestAlertingRule_Exec(t *testing.T) {
{metricWithLabels(t, "name", "foo")},
{},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateInactive},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateInactive}},
},
},
{
@ -154,8 +151,8 @@ func TestAlertingRule_Exec(t *testing.T) {
{},
{metricWithLabels(t, "name", "foo")},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateFiring},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
{
@ -166,8 +163,8 @@ func TestAlertingRule_Exec(t *testing.T) {
{metricWithLabels(t, "name", "foo")},
{},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateInactive},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateInactive}},
},
},
{
@ -179,7 +176,7 @@ func TestAlertingRule_Exec(t *testing.T) {
{},
{},
},
map[uint64]*notifier.Alert{},
nil,
},
{
newTestAlertingRule("single-firing=>inactive=>firing=>inactive=>empty=>firing", 0),
@ -191,8 +188,8 @@ func TestAlertingRule_Exec(t *testing.T) {
{},
{metricWithLabels(t, "name", "foo")},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateFiring},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
{
@ -204,10 +201,10 @@ func TestAlertingRule_Exec(t *testing.T) {
metricWithLabels(t, "name", "foo2"),
},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateFiring},
hash(metricWithLabels(t, "name", "foo1")): {State: notifier.StateFiring},
hash(metricWithLabels(t, "name", "foo2")): {State: notifier.StateFiring},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
{labels: []string{"name", "foo1"}, alert: &notifier.Alert{State: notifier.StateFiring}},
{labels: []string{"name", "foo2"}, alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
{
@ -220,9 +217,9 @@ func TestAlertingRule_Exec(t *testing.T) {
// 1: fire first alert
// 2: fire second alert, set first inactive
// 3: fire third alert, set second inactive, delete first one
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo1")): {State: notifier.StateInactive},
hash(metricWithLabels(t, "name", "foo2")): {State: notifier.StateFiring},
[]testAlert{
{labels: []string{"name", "foo1"}, alert: &notifier.Alert{State: notifier.StateInactive}},
{labels: []string{"name", "foo2"}, alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
{
@ -230,8 +227,8 @@ func TestAlertingRule_Exec(t *testing.T) {
[][]datasource.Metric{
{metricWithLabels(t, "name", "foo")},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StatePending},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StatePending}},
},
},
{
@ -240,8 +237,8 @@ func TestAlertingRule_Exec(t *testing.T) {
{metricWithLabels(t, "name", "foo")},
{metricWithLabels(t, "name", "foo")},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateFiring},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
{
@ -252,7 +249,7 @@ func TestAlertingRule_Exec(t *testing.T) {
// empty step to reset and delete pending alerts
{},
},
map[uint64]*notifier.Alert{},
nil,
},
{
newTestAlertingRule("for-pending=>firing=>inactive", defaultStep),
@ -262,8 +259,8 @@ func TestAlertingRule_Exec(t *testing.T) {
// empty step to reset pending alerts
{},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateInactive},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateInactive}},
},
},
{
@ -275,8 +272,8 @@ func TestAlertingRule_Exec(t *testing.T) {
{},
{metricWithLabels(t, "name", "foo")},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StatePending},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StatePending}},
},
},
{
@ -289,8 +286,8 @@ func TestAlertingRule_Exec(t *testing.T) {
{metricWithLabels(t, "name", "foo")},
{metricWithLabels(t, "name", "foo")},
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "name", "foo")): {State: notifier.StateFiring},
[]testAlert{
{labels: []string{"name", "foo"}, alert: &notifier.Alert{State: notifier.StateFiring}},
},
},
}
@ -312,7 +309,15 @@ func TestAlertingRule_Exec(t *testing.T) {
if len(tc.rule.alerts) != len(tc.expAlerts) {
t.Fatalf("expected %d alerts; got %d", len(tc.expAlerts), len(tc.rule.alerts))
}
for key, exp := range tc.expAlerts {
expAlerts := make(map[uint64]*notifier.Alert)
for _, ta := range tc.expAlerts {
labels := ta.labels
labels = append(labels, alertNameLabel)
labels = append(labels, tc.rule.Name)
h := hash(metricWithLabels(t, labels...))
expAlerts[h] = ta.alert
}
for key, exp := range expAlerts {
got, ok := tc.rule.alerts[key]
if !ok {
t.Fatalf("expected to have key %d", key)
@ -468,6 +473,11 @@ func TestAlertingRule_ExecRange(t *testing.T) {
var j int
for _, series := range tc.data {
for _, timestamp := range series.Timestamps {
a := tc.expAlerts[j]
if a.Labels == nil {
a.Labels = make(map[string]string)
}
a.Labels[alertNameLabel] = tc.rule.Name
expTS = append(expTS, tc.rule.alertToTimeSeries(tc.expAlerts[j], timestamp)...)
j++
}
@ -496,7 +506,6 @@ func TestAlertingRule_Restore(t *testing.T) {
[]datasource.Metric{
metricWithValueAndLabels(t, float64(time.Now().Truncate(time.Hour).Unix()),
"__name__", alertForStateMetricName,
alertNameLabel, "",
),
},
map[uint64]*notifier.Alert{
@ -509,7 +518,7 @@ func TestAlertingRule_Restore(t *testing.T) {
[]datasource.Metric{
metricWithValueAndLabels(t, float64(time.Now().Truncate(time.Hour).Unix()),
"__name__", alertForStateMetricName,
alertNameLabel, "",
alertNameLabel, "metric labels",
alertGroupNameLabel, "groupID",
"foo", "bar",
"namespace", "baz",
@ -517,6 +526,8 @@ func TestAlertingRule_Restore(t *testing.T) {
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t,
alertNameLabel, "metric labels",
alertGroupNameLabel, "groupID",
"foo", "bar",
"namespace", "baz",
)): {State: notifier.StatePending,
@ -528,7 +539,6 @@ func TestAlertingRule_Restore(t *testing.T) {
[]datasource.Metric{
metricWithValueAndLabels(t, float64(time.Now().Truncate(time.Hour).Unix()),
"__name__", alertForStateMetricName,
alertNameLabel, "",
"foo", "bar",
"namespace", "baz",
// extra labels set by rule
@ -645,18 +655,20 @@ func TestAlertingRule_Template(t *testing.T) {
metricWithValueAndLabels(t, 1, "instance", "bar"),
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "region", "east", "instance", "foo")): {
hash(metricWithLabels(t, alertNameLabel, "common", "region", "east", "instance", "foo")): {
Annotations: map[string]string{},
Labels: map[string]string{
"region": "east",
"instance": "foo",
alertNameLabel: "common",
"region": "east",
"instance": "foo",
},
},
hash(metricWithLabels(t, "region", "east", "instance", "bar")): {
hash(metricWithLabels(t, alertNameLabel, "common", "region", "east", "instance", "bar")): {
Annotations: map[string]string{},
Labels: map[string]string{
"region": "east",
"instance": "bar",
alertNameLabel: "common",
"region": "east",
"instance": "bar",
},
},
},
@ -679,20 +691,22 @@ func TestAlertingRule_Template(t *testing.T) {
metricWithValueAndLabels(t, 10, "instance", "bar"),
},
map[uint64]*notifier.Alert{
hash(metricWithLabels(t, "region", "east", "instance", "foo")): {
hash(metricWithLabels(t, alertNameLabel, "override label", "region", "east", "instance", "foo")): {
Labels: map[string]string{
"instance": "foo",
"region": "east",
alertNameLabel: "override label",
"instance": "foo",
"region": "east",
},
Annotations: map[string]string{
"summary": `Too high connection number for "foo" for region east`,
"description": `It is 2 connections for "foo"`,
},
},
hash(metricWithLabels(t, "region", "east", "instance", "bar")): {
hash(metricWithLabels(t, alertNameLabel, "override label", "region", "east", "instance", "bar")): {
Labels: map[string]string{
"instance": "bar",
"region": "east",
alertNameLabel: "override label",
"instance": "bar",
"region": "east",
},
Annotations: map[string]string{
"summary": `Too high connection number for "bar" for region east`,


@ -192,7 +192,14 @@ func TestGroupStart(t *testing.T) {
// add rule labels - see config/testdata/rules1-good.rules
alert1.Labels["label"] = "bar"
alert1.Labels["host"] = inst1
alert1.ID = hash(m1)
// add service labels
alert1.Labels[alertNameLabel] = alert1.Name
alert1.Labels[alertGroupNameLabel] = g.Name
var labels1 []string
for k, v := range alert1.Labels {
labels1 = append(labels1, k, v)
}
alert1.ID = hash(metricWithLabels(t, labels1...))
alert2, err := r.newAlert(m2, time.Now(), nil)
if err != nil {
@ -204,7 +211,14 @@ func TestGroupStart(t *testing.T) {
// add rule labels - see config/testdata/rules1-good.rules
alert2.Labels["label"] = "bar"
alert2.Labels["host"] = inst2
alert2.ID = hash(m2)
// add service labels
alert2.Labels[alertNameLabel] = alert2.Name
alert2.Labels[alertGroupNameLabel] = g.Name
var labels2 []string
for k, v := range alert2.Labels {
labels2 = append(labels2, k, v)
}
alert2.ID = hash(metricWithLabels(t, labels2...))
finished := make(chan struct{})
fs.add(m1)


@ -205,7 +205,8 @@ func compareTimeSeries(t *testing.T, a, b []prompbmarshal.TimeSeries) error {
}*/
}
if len(expTS.Labels) != len(gotTS.Labels) {
return fmt.Errorf("expected number of labels %d; got %d", len(expTS.Labels), len(gotTS.Labels))
return fmt.Errorf("expected number of labels %d (%v); got %d (%v)",
len(expTS.Labels), expTS.Labels, len(gotTS.Labels), gotTS.Labels)
}
for i, exp := range expTS.Labels {
got := gotTS.Labels[i]


@ -34,6 +34,8 @@ type Alert struct {
Value float64
// ID is the unique identifer for the Alert
ID uint64
// Restored is true if Alert was restored after restart
Restored bool
}
// AlertState type indicates the Alert state


@ -51,7 +51,7 @@
<div class="group-heading{% if rNotOk[g.Name] > 0 %} alert-danger{% endif %}" data-bs-target="rules-{%s g.ID %}">
<span class="anchor" id="group-{%s g.ID %}"></span>
<a href="#group-{%s g.ID %}">{%s g.Name %}{% if g.Type != "prometheus" %} ({%s g.Type %}){% endif %} (every {%s g.Interval %})</a>
{% if rNotOk[g.Name] > 0 %}<span class="badge bg-danger" title="Number of rules withs status Error">{%d rNotOk[g.Name] %}</span> {% endif %}
{% if rNotOk[g.Name] > 0 %}<span class="badge bg-danger" title="Number of rules with status Error">{%d rNotOk[g.Name] %}</span> {% endif %}
<span class="badge bg-success" title="Number of rules withs status Ok">{%d rOk[g.Name] %}</span>
<p class="fs-6 fw-lighter">{%s g.File %}</p>
{% if len(g.ExtraFilterLabels) > 0 %}
@ -177,8 +177,11 @@
<span class="ms-1 badge bg-primary">{%s k %}={%s ar.Labels[k] %}</span>
{% endfor %}
</td>
<td><span class="badge {% if ar.State=="firing" %}bg-danger{% else %} bg-warning text-dark{% endif %}">{%s ar.State %}</span></td>
<td>{%s ar.ActiveAt.Format("2006-01-02T15:04:05Z07:00") %}</td>
<td>{%= badgeState(ar.State) %}</td>
<td>
{%s ar.ActiveAt.Format("2006-01-02T15:04:05Z07:00") %}
{% if ar.Restored %}{%= badgeRestored() %}{% endif %}
</td>
<td>{%s ar.Value %}</td>
<td>
<a href="/{%s g.ID %}/{%s ar.ID %}/status">Details</a>
@ -285,4 +288,18 @@
</div>
{%= tpl.Footer() %}
{% endfunc %}
{% func badgeState(state string) %}
{%code
badgeClass := "bg-warning text-dark"
if state == "firing" {
badgeClass = "bg-danger"
}
%}
<span class="badge {%s badgeClass %}">{%s state %}</span>
{% endfunc %}
{% func badgeRestored() %}
<span class="badge bg-warning text-dark" title="Alert state was restored after the service restart from remote storage">restored</span>
{% endfunc %}


@ -190,7 +190,7 @@ func StreamListGroups(qw422016 *qt422016.Writer, groups []APIGroup) {
//line app/vmalert/web.qtpl:54
if rNotOk[g.Name] > 0 {
//line app/vmalert/web.qtpl:54
qw422016.N().S(`<span class="badge bg-danger" title="Number of rules withs status Error">`)
qw422016.N().S(`<span class="badge bg-danger" title="Number of rules with status Error">`)
//line app/vmalert/web.qtpl:54
qw422016.N().D(rNotOk[g.Name])
//line app/vmalert/web.qtpl:54
@ -623,126 +623,125 @@ func StreamListAlerts(qw422016 *qt422016.Writer, groupAlerts []GroupAlerts) {
//line app/vmalert/web.qtpl:178
qw422016.N().S(`
</td>
<td><span class="badge `)
//line app/vmalert/web.qtpl:180
if ar.State == "firing" {
//line app/vmalert/web.qtpl:180
qw422016.N().S(`bg-danger`)
//line app/vmalert/web.qtpl:180
} else {
//line app/vmalert/web.qtpl:180
qw422016.N().S(` bg-warning text-dark`)
//line app/vmalert/web.qtpl:180
}
//line app/vmalert/web.qtpl:180
qw422016.N().S(`">`)
//line app/vmalert/web.qtpl:180
qw422016.E().S(ar.State)
//line app/vmalert/web.qtpl:180
qw422016.N().S(`</span></td>
<td>`)
//line app/vmalert/web.qtpl:181
qw422016.E().S(ar.ActiveAt.Format("2006-01-02T15:04:05Z07:00"))
//line app/vmalert/web.qtpl:181
//line app/vmalert/web.qtpl:180
streambadgeState(qw422016, ar.State)
//line app/vmalert/web.qtpl:180
qw422016.N().S(`</td>
<td>
`)
//line app/vmalert/web.qtpl:182
qw422016.E().S(ar.ActiveAt.Format("2006-01-02T15:04:05Z07:00"))
//line app/vmalert/web.qtpl:182
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:183
if ar.Restored {
//line app/vmalert/web.qtpl:183
streambadgeRestored(qw422016)
//line app/vmalert/web.qtpl:183
}
//line app/vmalert/web.qtpl:183
qw422016.N().S(`
</td>
<td>`)
//line app/vmalert/web.qtpl:182
//line app/vmalert/web.qtpl:185
qw422016.E().S(ar.Value)
//line app/vmalert/web.qtpl:182
//line app/vmalert/web.qtpl:185
qw422016.N().S(`</td>
<td>
<a href="/`)
//line app/vmalert/web.qtpl:184
//line app/vmalert/web.qtpl:187
qw422016.E().S(g.ID)
//line app/vmalert/web.qtpl:184
//line app/vmalert/web.qtpl:187
qw422016.N().S(`/`)
//line app/vmalert/web.qtpl:184
//line app/vmalert/web.qtpl:187
qw422016.E().S(ar.ID)
//line app/vmalert/web.qtpl:184
//line app/vmalert/web.qtpl:187
qw422016.N().S(`/status">Details</a>
</td>
</tr>
`)
//line app/vmalert/web.qtpl:187
//line app/vmalert/web.qtpl:190
}
//line app/vmalert/web.qtpl:187
//line app/vmalert/web.qtpl:190
qw422016.N().S(`
</tbody>
</table>
`)
//line app/vmalert/web.qtpl:190
//line app/vmalert/web.qtpl:193
}
//line app/vmalert/web.qtpl:190
//line app/vmalert/web.qtpl:193
qw422016.N().S(`
</div>
<br>
`)
//line app/vmalert/web.qtpl:193
//line app/vmalert/web.qtpl:196
}
//line app/vmalert/web.qtpl:193
//line app/vmalert/web.qtpl:196
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:195
//line app/vmalert/web.qtpl:198
} else {
//line app/vmalert/web.qtpl:195
//line app/vmalert/web.qtpl:198
qw422016.N().S(`
<div>
<p>No items...</p>
</div>
`)
//line app/vmalert/web.qtpl:199
//line app/vmalert/web.qtpl:202
}
//line app/vmalert/web.qtpl:199
//line app/vmalert/web.qtpl:202
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:201
//line app/vmalert/web.qtpl:204
tpl.StreamFooter(qw422016)
//line app/vmalert/web.qtpl:201
//line app/vmalert/web.qtpl:204
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
}
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
func WriteListAlerts(qq422016 qtio422016.Writer, groupAlerts []GroupAlerts) {
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
qw422016 := qt422016.AcquireWriter(qq422016)
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
StreamListAlerts(qw422016, groupAlerts)
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
qt422016.ReleaseWriter(qw422016)
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
}
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
func ListAlerts(groupAlerts []GroupAlerts) string {
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
qb422016 := qt422016.AcquireByteBuffer()
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
WriteListAlerts(qb422016, groupAlerts)
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
qs422016 := string(qb422016.B)
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
qt422016.ReleaseByteBuffer(qb422016)
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
return qs422016
//line app/vmalert/web.qtpl:203
//line app/vmalert/web.qtpl:206
}
//line app/vmalert/web.qtpl:205
func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
//line app/vmalert/web.qtpl:205
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:206
tpl.StreamHeader(qw422016, "", navItems)
//line app/vmalert/web.qtpl:206
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:208
func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
//line app/vmalert/web.qtpl:208
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:209
tpl.StreamHeader(qw422016, "", navItems)
//line app/vmalert/web.qtpl:209
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:211
var labelKeys []string
for k := range alert.Labels {
labelKeys = append(labelKeys, k)
@ -755,28 +754,28 @@ func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
}
sort.Strings(annotationKeys)
//line app/vmalert/web.qtpl:219
//line app/vmalert/web.qtpl:222
qw422016.N().S(`
<div class="display-6 pb-3 mb-3">`)
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
qw422016.E().S(alert.Name)
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
qw422016.N().S(`<span class="ms-2 badge `)
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
if alert.State == "firing" {
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
qw422016.N().S(`bg-danger`)
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
} else {
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
qw422016.N().S(` bg-warning text-dark`)
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
}
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
qw422016.N().S(`">`)
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
qw422016.E().S(alert.State)
//line app/vmalert/web.qtpl:220
//line app/vmalert/web.qtpl:223
qw422016.N().S(`</span></div>
<div class="container border-bottom p-2">
<div class="row">
@ -785,9 +784,9 @@ func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
</div>
<div class="col">
`)
//line app/vmalert/web.qtpl:227
//line app/vmalert/web.qtpl:230
qw422016.E().S(alert.ActiveAt.Format("2006-01-02T15:04:05Z07:00"))
//line app/vmalert/web.qtpl:227
//line app/vmalert/web.qtpl:230
qw422016.N().S(`
</div>
</div>
@ -799,9 +798,9 @@ func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
</div>
<div class="col">
<code><pre>`)
//line app/vmalert/web.qtpl:237
//line app/vmalert/web.qtpl:240
qw422016.E().S(alert.Expression)
//line app/vmalert/web.qtpl:237
//line app/vmalert/web.qtpl:240
qw422016.N().S(`</pre></code>
</div>
</div>
@ -813,23 +812,23 @@ func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
</div>
<div class="col">
`)
//line app/vmalert/web.qtpl:247
//line app/vmalert/web.qtpl:250
for _, k := range labelKeys {
//line app/vmalert/web.qtpl:247
//line app/vmalert/web.qtpl:250
qw422016.N().S(`
<span class="m-1 badge bg-primary">`)
//line app/vmalert/web.qtpl:248
//line app/vmalert/web.qtpl:251
qw422016.E().S(k)
//line app/vmalert/web.qtpl:248
//line app/vmalert/web.qtpl:251
qw422016.N().S(`=`)
//line app/vmalert/web.qtpl:248
//line app/vmalert/web.qtpl:251
qw422016.E().S(alert.Labels[k])
//line app/vmalert/web.qtpl:248
//line app/vmalert/web.qtpl:251
qw422016.N().S(`</span>
`)
//line app/vmalert/web.qtpl:249
//line app/vmalert/web.qtpl:252
}
//line app/vmalert/web.qtpl:249
//line app/vmalert/web.qtpl:252
qw422016.N().S(`
</div>
</div>
@ -841,24 +840,24 @@ func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
</div>
<div class="col">
`)
//line app/vmalert/web.qtpl:259
//line app/vmalert/web.qtpl:262
for _, k := range annotationKeys {
//line app/vmalert/web.qtpl:259
//line app/vmalert/web.qtpl:262
qw422016.N().S(`
<b>`)
//line app/vmalert/web.qtpl:260
//line app/vmalert/web.qtpl:263
qw422016.E().S(k)
//line app/vmalert/web.qtpl:260
//line app/vmalert/web.qtpl:263
qw422016.N().S(`:</b><br>
<p>`)
//line app/vmalert/web.qtpl:261
//line app/vmalert/web.qtpl:264
qw422016.E().S(alert.Annotations[k])
//line app/vmalert/web.qtpl:261
//line app/vmalert/web.qtpl:264
qw422016.N().S(`</p>
`)
//line app/vmalert/web.qtpl:262
//line app/vmalert/web.qtpl:265
}
//line app/vmalert/web.qtpl:262
//line app/vmalert/web.qtpl:265
qw422016.N().S(`
</div>
</div>
@ -870,13 +869,13 @@ func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
</div>
<div class="col">
<a target="_blank" href="/groups#group-`)
//line app/vmalert/web.qtpl:272
//line app/vmalert/web.qtpl:275
qw422016.E().S(alert.GroupID)
//line app/vmalert/web.qtpl:272
//line app/vmalert/web.qtpl:275
qw422016.N().S(`">`)
//line app/vmalert/web.qtpl:272
//line app/vmalert/web.qtpl:275
qw422016.E().S(alert.GroupID)
//line app/vmalert/web.qtpl:272
//line app/vmalert/web.qtpl:275
qw422016.N().S(`</a>
</div>
</div>
@ -888,45 +887,132 @@ func StreamAlert(qw422016 *qt422016.Writer, alert *APIAlert) {
</div>
<div class="col">
<a target="_blank" href="`)
//line app/vmalert/web.qtpl:282
//line app/vmalert/web.qtpl:285
qw422016.E().S(alert.SourceLink)
//line app/vmalert/web.qtpl:282
//line app/vmalert/web.qtpl:285
qw422016.N().S(`">Link</a>
</div>
</div>
</div>
`)
//line app/vmalert/web.qtpl:286
//line app/vmalert/web.qtpl:289
tpl.StreamFooter(qw422016)
//line app/vmalert/web.qtpl:286
//line app/vmalert/web.qtpl:289
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
}
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
func WriteAlert(qq422016 qtio422016.Writer, alert *APIAlert) {
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
qw422016 := qt422016.AcquireWriter(qq422016)
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
StreamAlert(qw422016, alert)
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
qt422016.ReleaseWriter(qw422016)
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
}
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
func Alert(alert *APIAlert) string {
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
qb422016 := qt422016.AcquireByteBuffer()
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
WriteAlert(qb422016, alert)
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
qs422016 := string(qb422016.B)
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
qt422016.ReleaseByteBuffer(qb422016)
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
return qs422016
//line app/vmalert/web.qtpl:288
//line app/vmalert/web.qtpl:291
}
//line app/vmalert/web.qtpl:293
func streambadgeState(qw422016 *qt422016.Writer, state string) {
//line app/vmalert/web.qtpl:293
qw422016.N().S(`
`)
//line app/vmalert/web.qtpl:295
badgeClass := "bg-warning text-dark"
if state == "firing" {
badgeClass = "bg-danger"
}
//line app/vmalert/web.qtpl:299
qw422016.N().S(`
<span class="badge `)
//line app/vmalert/web.qtpl:300
qw422016.E().S(badgeClass)
//line app/vmalert/web.qtpl:300
qw422016.N().S(`">`)
//line app/vmalert/web.qtpl:300
qw422016.E().S(state)
//line app/vmalert/web.qtpl:300
qw422016.N().S(`</span>
`)
//line app/vmalert/web.qtpl:301
}
//line app/vmalert/web.qtpl:301
func writebadgeState(qq422016 qtio422016.Writer, state string) {
//line app/vmalert/web.qtpl:301
qw422016 := qt422016.AcquireWriter(qq422016)
//line app/vmalert/web.qtpl:301
streambadgeState(qw422016, state)
//line app/vmalert/web.qtpl:301
qt422016.ReleaseWriter(qw422016)
//line app/vmalert/web.qtpl:301
}
//line app/vmalert/web.qtpl:301
func badgeState(state string) string {
//line app/vmalert/web.qtpl:301
qb422016 := qt422016.AcquireByteBuffer()
//line app/vmalert/web.qtpl:301
writebadgeState(qb422016, state)
//line app/vmalert/web.qtpl:301
qs422016 := string(qb422016.B)
//line app/vmalert/web.qtpl:301
qt422016.ReleaseByteBuffer(qb422016)
//line app/vmalert/web.qtpl:301
return qs422016
//line app/vmalert/web.qtpl:301
}
//line app/vmalert/web.qtpl:303
func streambadgeRestored(qw422016 *qt422016.Writer) {
//line app/vmalert/web.qtpl:303
qw422016.N().S(`
<span class="badge bg-warning text-dark" title="Alert state was restored after reload from remote storage">restored</span>
`)
//line app/vmalert/web.qtpl:305
}
//line app/vmalert/web.qtpl:305
func writebadgeRestored(qq422016 qtio422016.Writer) {
//line app/vmalert/web.qtpl:305
qw422016 := qt422016.AcquireWriter(qq422016)
//line app/vmalert/web.qtpl:305
streambadgeRestored(qw422016)
//line app/vmalert/web.qtpl:305
qt422016.ReleaseWriter(qw422016)
//line app/vmalert/web.qtpl:305
}
//line app/vmalert/web.qtpl:305
func badgeRestored() string {
//line app/vmalert/web.qtpl:305
qb422016 := qt422016.AcquireByteBuffer()
//line app/vmalert/web.qtpl:305
writebadgeRestored(qb422016)
//line app/vmalert/web.qtpl:305
qs422016 := string(qb422016.B)
//line app/vmalert/web.qtpl:305
qt422016.ReleaseByteBuffer(qb422016)
//line app/vmalert/web.qtpl:305
return qs422016
//line app/vmalert/web.qtpl:305
}


@ -18,6 +18,7 @@ type APIAlert struct {
Annotations map[string]string `json:"annotations"`
ActiveAt time.Time `json:"activeAt"`
SourceLink string `json:"source"`
Restored bool `json:"restored"`
}
// APIGroup represents Group for WEB view