diff --git a/app/victoria-metrics/main_test.go b/app/victoria-metrics/main_test.go index 626adecba..c4ac0f393 100644 --- a/app/victoria-metrics/main_test.go +++ b/app/victoria-metrics/main_test.go @@ -369,7 +369,7 @@ func readIn(readFor string, t *testing.T, insertTime time.Time) []test { t.Helper() s := newSuite(t) var tt []test - s.noError(filepath.Walk(filepath.Join(testFixturesDir, readFor), func(path string, info os.FileInfo, err error) error { + s.noError(filepath.Walk(filepath.Join(testFixturesDir, readFor), func(path string, _ os.FileInfo, err error) error { if err != nil { return err } diff --git a/app/vlinsert/elasticsearch/elasticsearch_timing_test.go b/app/vlinsert/elasticsearch/elasticsearch_timing_test.go index 1c4de3cb4..ef62f0e4f 100644 --- a/app/vlinsert/elasticsearch/elasticsearch_timing_test.go +++ b/app/vlinsert/elasticsearch/elasticsearch_timing_test.go @@ -33,7 +33,7 @@ func benchmarkReadBulkRequest(b *testing.B, isGzip bool) { timeField := "@timestamp" msgField := "message" - processLogMessage := func(timestmap int64, fields []logstorage.Field) {} + processLogMessage := func(_ int64, _ []logstorage.Field) {} b.ReportAllocs() b.SetBytes(int64(len(data))) diff --git a/app/vlinsert/loki/loki_json_test.go b/app/vlinsert/loki/loki_json_test.go index 93cf8652a..6eaa88d36 100644 --- a/app/vlinsert/loki/loki_json_test.go +++ b/app/vlinsert/loki/loki_json_test.go @@ -11,7 +11,7 @@ import ( func TestParseJSONRequestFailure(t *testing.T) { f := func(s string) { t.Helper() - n, err := parseJSONRequest([]byte(s), func(timestamp int64, fields []logstorage.Field) { + n, err := parseJSONRequest([]byte(s), func(_ int64, _ []logstorage.Field) { t.Fatalf("unexpected call to parseJSONRequest callback!") }) if err == nil { diff --git a/app/vlinsert/loki/loki_json_timing_test.go b/app/vlinsert/loki/loki_json_timing_test.go index af7c29697..985782c8b 100644 --- a/app/vlinsert/loki/loki_json_timing_test.go +++ b/app/vlinsert/loki/loki_json_timing_test.go @@ -27,7 +27,7 @@ func benchmarkParseJSONRequest(b *testing.B, streams, rows, labels int) { b.RunParallel(func(pb *testing.PB) { data := getJSONBody(streams, rows, labels) for pb.Next() { - _, err := parseJSONRequest(data, func(timestamp int64, fields []logstorage.Field) {}) + _, err := parseJSONRequest(data, func(_ int64, _ []logstorage.Field) {}) if err != nil { panic(fmt.Errorf("unexpected error: %w", err)) } diff --git a/app/vlinsert/loki/loki_protobuf_timing_test.go b/app/vlinsert/loki/loki_protobuf_timing_test.go index 1e4d16048..981951765 100644 --- a/app/vlinsert/loki/loki_protobuf_timing_test.go +++ b/app/vlinsert/loki/loki_protobuf_timing_test.go @@ -29,7 +29,7 @@ func benchmarkParseProtobufRequest(b *testing.B, streams, rows, labels int) { b.RunParallel(func(pb *testing.PB) { body := getProtobufBody(streams, rows, labels) for pb.Next() { - _, err := parseProtobufRequest(body, func(timestamp int64, fields []logstorage.Field) {}) + _, err := parseProtobufRequest(body, func(_ int64, _ []logstorage.Field) {}) if err != nil { panic(fmt.Errorf("unexpected error: %w", err)) } diff --git a/app/vmagent/remotewrite/remotewrite.go b/app/vmagent/remotewrite/remotewrite.go index 8e6de0737..b338c0402 100644 --- a/app/vmagent/remotewrite/remotewrite.go +++ b/app/vmagent/remotewrite/remotewrite.go @@ -1004,7 +1004,7 @@ func getRowsCount(tss []prompbmarshal.TimeSeries) int { // CheckStreamAggrConfigs checks configs pointed by -remoteWrite.streamAggr.config func CheckStreamAggrConfigs() error { - pushNoop := func(tss []prompbmarshal.TimeSeries) {} 
+ pushNoop := func(_ []prompbmarshal.TimeSeries) {} for idx, sasFile := range *streamAggrConfig { if sasFile == "" { continue diff --git a/app/vmalert/datasource/vm_test.go b/app/vmalert/datasource/vm_test.go index 50a03ac28..d2caaf137 100644 --- a/app/vmalert/datasource/vm_test.go +++ b/app/vmalert/datasource/vm_test.go @@ -71,7 +71,7 @@ func TestVMInstantQuery(t *testing.T) { w.Write([]byte(`{"status":"success","data":{"resultType":"scalar","result":[1583786142, "1"]},"stats":{"seriesFetched": "42"}}`)) } }) - mux.HandleFunc("/render", func(w http.ResponseWriter, request *http.Request) { + mux.HandleFunc("/render", func(w http.ResponseWriter, _ *http.Request) { c++ switch c { case 8: diff --git a/app/vmalert/main.go b/app/vmalert/main.go index fbbbffd7b..97e4f8e85 100644 --- a/app/vmalert/main.go +++ b/app/vmalert/main.go @@ -304,7 +304,7 @@ func getAlertURLGenerator(externalURL *url.URL, externalAlertSource string, vali "tpl": externalAlertSource, } return func(alert notifier.Alert) string { - qFn := func(query string) ([]datasource.Metric, error) { + qFn := func(_ string) ([]datasource.Metric, error) { return nil, fmt.Errorf("`query` template isn't supported for alert source template") } templated, err := alert.ExecTemplate(qFn, alert.Labels, m) diff --git a/app/vmalert/notifier/alert_test.go b/app/vmalert/notifier/alert_test.go index c2a51c304..b02678dce 100644 --- a/app/vmalert/notifier/alert_test.go +++ b/app/vmalert/notifier/alert_test.go @@ -178,7 +178,7 @@ func TestAlert_ExecTemplate(t *testing.T) { }, } - qFn := func(q string) ([]datasource.Metric, error) { + qFn := func(_ string) ([]datasource.Metric, error) { return []datasource.Metric{ { Labels: []datasource.Label{ diff --git a/app/vmalert/rule/alerting.go b/app/vmalert/rule/alerting.go index 82bc56f6e..8f6d0e980 100644 --- a/app/vmalert/rule/alerting.go +++ b/app/vmalert/rule/alerting.go @@ -310,7 +310,7 @@ func (ar *AlertingRule) execRange(ctx context.Context, start, end time.Time) ([] } var result []prompbmarshal.TimeSeries holdAlertState := make(map[uint64]*notifier.Alert) - qFn := func(query string) ([]datasource.Metric, error) { + qFn := func(_ string) ([]datasource.Metric, error) { return nil, fmt.Errorf("`query` template isn't supported in replay mode") } for _, s := range res.Data { diff --git a/app/vmalert/templates/template.go b/app/vmalert/templates/template.go index 08d81b00e..8bbdd4c60 100644 --- a/app/vmalert/templates/template.go +++ b/app/vmalert/templates/template.go @@ -476,7 +476,7 @@ func templateFuncs() textTpl.FuncMap { // For example, {{ query "foo" | first | value }} will // execute "/api/v1/query?query=foo" request and will return // the first value in response. - "query": func(q string) ([]metric, error) { + "query": func(_ string) ([]metric, error) { // query function supposed to be substituted at FuncsWithQuery(). // it is present here only for validation purposes, when there is no // provided datasource. 
diff --git a/app/vmalert/web_test.go b/app/vmalert/web_test.go index cb6d9bdcb..30796e3ad 100644 --- a/app/vmalert/web_test.go +++ b/app/vmalert/web_test.go @@ -36,7 +36,7 @@ func TestHandler(t *testing.T) { }} rh := &requestHandler{m: m} - getResp := func(url string, to interface{}, code int) { + getResp := func(t *testing.T, url string, to interface{}, code int) { t.Helper() resp, err := http.Get(url) if err != nil { @@ -60,43 +60,43 @@ func TestHandler(t *testing.T) { defer ts.Close() t.Run("/", func(t *testing.T) { - getResp(ts.URL, nil, 200) - getResp(ts.URL+"/vmalert", nil, 200) - getResp(ts.URL+"/vmalert/alerts", nil, 200) - getResp(ts.URL+"/vmalert/groups", nil, 200) - getResp(ts.URL+"/vmalert/notifiers", nil, 200) - getResp(ts.URL+"/rules", nil, 200) + getResp(t, ts.URL, nil, 200) + getResp(t, ts.URL+"/vmalert", nil, 200) + getResp(t, ts.URL+"/vmalert/alerts", nil, 200) + getResp(t, ts.URL+"/vmalert/groups", nil, 200) + getResp(t, ts.URL+"/vmalert/notifiers", nil, 200) + getResp(t, ts.URL+"/rules", nil, 200) }) t.Run("/vmalert/rule", func(t *testing.T) { a := ruleToAPI(ar) - getResp(ts.URL+"/vmalert/"+a.WebLink(), nil, 200) + getResp(t, ts.URL+"/vmalert/"+a.WebLink(), nil, 200) r := ruleToAPI(rr) - getResp(ts.URL+"/vmalert/"+r.WebLink(), nil, 200) + getResp(t, ts.URL+"/vmalert/"+r.WebLink(), nil, 200) }) t.Run("/vmalert/alert", func(t *testing.T) { alerts := ruleToAPIAlert(ar) for _, a := range alerts { - getResp(ts.URL+"/vmalert/"+a.WebLink(), nil, 200) + getResp(t, ts.URL+"/vmalert/"+a.WebLink(), nil, 200) } }) t.Run("/vmalert/rule?badParam", func(t *testing.T) { params := fmt.Sprintf("?%s=0&%s=1", paramGroupID, paramRuleID) - getResp(ts.URL+"/vmalert/rule"+params, nil, 404) + getResp(t, ts.URL+"/vmalert/rule"+params, nil, 404) params = fmt.Sprintf("?%s=1&%s=0", paramGroupID, paramRuleID) - getResp(ts.URL+"/vmalert/rule"+params, nil, 404) + getResp(t, ts.URL+"/vmalert/rule"+params, nil, 404) }) t.Run("/api/v1/alerts", func(t *testing.T) { lr := listAlertsResponse{} - getResp(ts.URL+"/api/v1/alerts", &lr, 200) + getResp(t, ts.URL+"/api/v1/alerts", &lr, 200) if length := len(lr.Data.Alerts); length != 1 { t.Errorf("expected 1 alert got %d", length) } lr = listAlertsResponse{} - getResp(ts.URL+"/vmalert/api/v1/alerts", &lr, 200) + getResp(t, ts.URL+"/vmalert/api/v1/alerts", &lr, 200) if length := len(lr.Data.Alerts); length != 1 { t.Errorf("expected 1 alert got %d", length) } @@ -104,13 +104,13 @@ func TestHandler(t *testing.T) { t.Run("/api/v1/alert?alertID&groupID", func(t *testing.T) { expAlert := newAlertAPI(ar, ar.GetAlerts()[0]) alert := &apiAlert{} - getResp(ts.URL+"/"+expAlert.APILink(), alert, 200) + getResp(t, ts.URL+"/"+expAlert.APILink(), alert, 200) if !reflect.DeepEqual(alert, expAlert) { t.Errorf("expected %v is equal to %v", alert, expAlert) } alert = &apiAlert{} - getResp(ts.URL+"/vmalert/"+expAlert.APILink(), alert, 200) + getResp(t, ts.URL+"/vmalert/"+expAlert.APILink(), alert, 200) if !reflect.DeepEqual(alert, expAlert) { t.Errorf("expected %v is equal to %v", alert, expAlert) } @@ -118,28 +118,28 @@ func TestHandler(t *testing.T) { t.Run("/api/v1/alert?badParams", func(t *testing.T) { params := fmt.Sprintf("?%s=0&%s=1", paramGroupID, paramAlertID) - getResp(ts.URL+"/api/v1/alert"+params, nil, 404) - getResp(ts.URL+"/vmalert/api/v1/alert"+params, nil, 404) + getResp(t, ts.URL+"/api/v1/alert"+params, nil, 404) + getResp(t, ts.URL+"/vmalert/api/v1/alert"+params, nil, 404) params = fmt.Sprintf("?%s=1&%s=0", paramGroupID, paramAlertID) - 
getResp(ts.URL+"/api/v1/alert"+params, nil, 404) - getResp(ts.URL+"/vmalert/api/v1/alert"+params, nil, 404) + getResp(t, ts.URL+"/api/v1/alert"+params, nil, 404) + getResp(t, ts.URL+"/vmalert/api/v1/alert"+params, nil, 404) // bad request, alertID is missing params = fmt.Sprintf("?%s=1", paramGroupID) - getResp(ts.URL+"/api/v1/alert"+params, nil, 400) - getResp(ts.URL+"/vmalert/api/v1/alert"+params, nil, 400) + getResp(t, ts.URL+"/api/v1/alert"+params, nil, 400) + getResp(t, ts.URL+"/vmalert/api/v1/alert"+params, nil, 400) }) t.Run("/api/v1/rules", func(t *testing.T) { lr := listGroupsResponse{} - getResp(ts.URL+"/api/v1/rules", &lr, 200) + getResp(t, ts.URL+"/api/v1/rules", &lr, 200) if length := len(lr.Data.Groups); length != 1 { t.Errorf("expected 1 group got %d", length) } lr = listGroupsResponse{} - getResp(ts.URL+"/vmalert/api/v1/rules", &lr, 200) + getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200) if length := len(lr.Data.Groups); length != 1 { t.Errorf("expected 1 group got %d", length) } @@ -147,21 +147,21 @@ func TestHandler(t *testing.T) { t.Run("/api/v1/rule?ruleID&groupID", func(t *testing.T) { expRule := ruleToAPI(ar) gotRule := apiRule{} - getResp(ts.URL+"/"+expRule.APILink(), &gotRule, 200) + getResp(t, ts.URL+"/"+expRule.APILink(), &gotRule, 200) if expRule.ID != gotRule.ID { t.Errorf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID) } gotRule = apiRule{} - getResp(ts.URL+"/vmalert/"+expRule.APILink(), &gotRule, 200) + getResp(t, ts.URL+"/vmalert/"+expRule.APILink(), &gotRule, 200) if expRule.ID != gotRule.ID { t.Errorf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID) } gotRuleWithUpdates := apiRuleWithUpdates{} - getResp(ts.URL+"/"+expRule.APILink(), &gotRuleWithUpdates, 200) + getResp(t, ts.URL+"/"+expRule.APILink(), &gotRuleWithUpdates, 200) if gotRuleWithUpdates.StateUpdates == nil || len(gotRuleWithUpdates.StateUpdates) < 1 { t.Fatalf("expected %+v to have state updates field not empty", gotRuleWithUpdates.StateUpdates) } @@ -171,7 +171,7 @@ func TestHandler(t *testing.T) { check := func(url string, expGroups, expRules int) { t.Helper() lr := listGroupsResponse{} - getResp(ts.URL+url, &lr, 200) + getResp(t, ts.URL+url, &lr, 200) if length := len(lr.Data.Groups); length != expGroups { t.Errorf("expected %d groups got %d", expGroups, length) } @@ -210,7 +210,7 @@ func TestHandler(t *testing.T) { t.Run("/api/v1/rules&exclude_alerts=true", func(t *testing.T) { // check if response returns active alerts by default lr := listGroupsResponse{} - getResp(ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml", &lr, 200) + getResp(t, ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml", &lr, 200) activeAlerts := 0 for _, gr := range lr.Data.Groups { for _, r := range gr.Rules { @@ -223,7 +223,7 @@ func TestHandler(t *testing.T) { // disable returning alerts via param lr = listGroupsResponse{} - getResp(ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml&exclude_alerts=true", &lr, 200) + getResp(t, ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml&exclude_alerts=true", &lr, 200) activeAlerts = 0 for _, gr := range lr.Data.Groups { for _, r := range gr.Rules { @@ -241,7 +241,7 @@ func TestEmptyResponse(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { rhWithNoGroups.handler(w, r) })) defer ts.Close() - getResp := func(url string, to interface{}, code int) { + getResp := func(t *testing.T, url string, to interface{}, code int) { t.Helper() resp, err := 
http.Get(url) if err != nil { @@ -264,13 +264,13 @@ func TestEmptyResponse(t *testing.T) { t.Run("no groups /api/v1/alerts", func(t *testing.T) { lr := listAlertsResponse{} - getResp(ts.URL+"/api/v1/alerts", &lr, 200) + getResp(t, ts.URL+"/api/v1/alerts", &lr, 200) if lr.Data.Alerts == nil { t.Errorf("expected /api/v1/alerts response to have non-nil data") } lr = listAlertsResponse{} - getResp(ts.URL+"/vmalert/api/v1/alerts", &lr, 200) + getResp(t, ts.URL+"/vmalert/api/v1/alerts", &lr, 200) if lr.Data.Alerts == nil { t.Errorf("expected /api/v1/alerts response to have non-nil data") } @@ -278,13 +278,13 @@ func TestEmptyResponse(t *testing.T) { t.Run("no groups /api/v1/rules", func(t *testing.T) { lr := listGroupsResponse{} - getResp(ts.URL+"/api/v1/rules", &lr, 200) + getResp(t, ts.URL+"/api/v1/rules", &lr, 200) if lr.Data.Groups == nil { t.Errorf("expected /api/v1/rules response to have non-nil data") } lr = listGroupsResponse{} - getResp(ts.URL+"/vmalert/api/v1/rules", &lr, 200) + getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200) if lr.Data.Groups == nil { t.Errorf("expected /api/v1/rules response to have non-nil data") } @@ -295,13 +295,13 @@ func TestEmptyResponse(t *testing.T) { t.Run("empty group /api/v1/rules", func(t *testing.T) { lr := listGroupsResponse{} - getResp(ts.URL+"/api/v1/rules", &lr, 200) + getResp(t, ts.URL+"/api/v1/rules", &lr, 200) if lr.Data.Groups == nil { t.Fatalf("expected /api/v1/rules response to have non-nil data") } lr = listGroupsResponse{} - getResp(ts.URL+"/vmalert/api/v1/rules", &lr, 200) + getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200) if lr.Data.Groups == nil { t.Fatalf("expected /api/v1/rules response to have non-nil data") } diff --git a/app/vmctl/main.go b/app/vmctl/main.go index 35f69c915..ee2678073 100644 --- a/app/vmctl/main.go +++ b/app/vmctl/main.go @@ -373,7 +373,7 @@ func main() { return cli.Exit(fmt.Errorf("cannot open exported block at path=%q err=%w", blockPath, err), 1) } var blocksCount atomic.Uint64 - if err := stream.Parse(f, isBlockGzipped, func(block *stream.Block) error { + if err := stream.Parse(f, isBlockGzipped, func(_ *stream.Block) error { blocksCount.Add(1) return nil }); err != nil { diff --git a/app/vminsert/common/streamaggr.go b/app/vminsert/common/streamaggr.go index e6f80e503..24ff870b5 100644 --- a/app/vminsert/common/streamaggr.go +++ b/app/vminsert/common/streamaggr.go @@ -54,7 +54,7 @@ func CheckStreamAggrConfig() error { if *streamAggrConfig == "" { return nil } - pushNoop := func(tss []prompbmarshal.TimeSeries) {} + pushNoop := func(_ []prompbmarshal.TimeSeries) {} opts := &streamaggr.Options{ DedupInterval: *streamAggrDedupInterval, DropInputLabels: *streamAggrDropInputLabels, diff --git a/app/vminsert/main.go b/app/vminsert/main.go index 7f2c403de..27c2c5651 100644 --- a/app/vminsert/main.go +++ b/app/vminsert/main.go @@ -101,7 +101,7 @@ func Init() { if len(*opentsdbHTTPListenAddr) > 0 { opentsdbhttpServer = opentsdbhttpserver.MustStart(*opentsdbHTTPListenAddr, *opentsdbHTTPUseProxyProtocol, opentsdbhttp.InsertHandler) } - promscrape.Init(func(at *auth.Token, wr *prompbmarshal.WriteRequest) { + promscrape.Init(func(_ *auth.Token, wr *prompbmarshal.WriteRequest) { prompush.Push(wr) }) } diff --git a/app/vmselect/graphite/eval.go b/app/vmselect/graphite/eval.go index 22526ddd1..69c9e3923 100644 --- a/app/vmselect/graphite/eval.go +++ b/app/vmselect/graphite/eval.go @@ -160,7 +160,7 @@ func newNextSeriesForSearchQuery(ec *evalConfig, sq *storage.SearchQuery, expr g seriesCh := make(chan *series, 
cgroup.AvailableCPUs()) errCh := make(chan error, 1) go func() { - err := rss.RunParallel(nil, func(rs *netstorage.Result, workerID uint) error { + err := rss.RunParallel(nil, func(rs *netstorage.Result, _ uint) error { nameWithTags := getCanonicalPath(&rs.MetricName) tags := unmarshalTags(nameWithTags) s := &series{ diff --git a/app/vmselect/graphite/transform.go b/app/vmselect/graphite/transform.go index dc16aba04..23ed95b00 100644 --- a/app/vmselect/graphite/transform.go +++ b/app/vmselect/graphite/transform.go @@ -405,7 +405,7 @@ func aggregateSeriesWithWildcards(ec *evalConfig, expr graphiteql.Expr, nextSeri for _, pos := range positions { positionsMap[pos] = struct{}{} } - keyFunc := func(name string, tags map[string]string) string { + keyFunc := func(name string, _ map[string]string) string { parts := strings.Split(getPathFromName(name), ".") dstParts := parts[:0] for i, part := range parts { @@ -1881,7 +1881,7 @@ func transformGroupByTags(ec *evalConfig, fe *graphiteql.FuncExpr) (nextSeriesFu if err != nil { return nil, err } - keyFunc := func(name string, tags map[string]string) string { + keyFunc := func(_ string, tags map[string]string) string { return formatKeyFromTags(tags, tagKeys, callback) } return groupByKeyFunc(ec, fe, nextSeries, callback, keyFunc) diff --git a/app/vmselect/prometheus/prometheus.go b/app/vmselect/prometheus/prometheus.go index cb09f19c8..947df907b 100644 --- a/app/vmselect/prometheus/prometheus.go +++ b/app/vmselect/prometheus/prometheus.go @@ -251,7 +251,7 @@ func ExportNativeHandler(startTime time.Time, w http.ResponseWriter, r *http.Req _, _ = bw.Write(trBuf) // Marshal native blocks. - err = netstorage.ExportBlocks(nil, sq, cp.deadline, func(mn *storage.MetricName, b *storage.Block, tr storage.TimeRange, workerID uint) error { + err = netstorage.ExportBlocks(nil, sq, cp.deadline, func(mn *storage.MetricName, b *storage.Block, _ storage.TimeRange, workerID uint) error { if err := bw.Error(); err != nil { return err } @@ -1238,7 +1238,7 @@ func (sw *scalableWriter) maybeFlushBuffer(bb *bytesutil.ByteBuffer) error { } func (sw *scalableWriter) flush() error { - sw.m.Range(func(k, v interface{}) bool { + sw.m.Range(func(_, v interface{}) bool { bb := v.(*bytesutil.ByteBuffer) _, err := sw.bw.Write(bb.B) return err == nil diff --git a/app/vmselect/promql/aggr.go b/app/vmselect/promql/aggr.go index 81540267c..3f77635a8 100644 --- a/app/vmselect/promql/aggr.go +++ b/app/vmselect/promql/aggr.go @@ -76,7 +76,7 @@ func newAggrFunc(afe func(tss []*timeseries) []*timeseries) aggrFunc { if err != nil { return nil, err } - return aggrFuncExt(func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries { + return aggrFuncExt(func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { return afe(tss) }, tss, &afa.ae.Modifier, afa.ae.Limit, false) } @@ -158,7 +158,7 @@ func aggrFuncAny(afa *aggrFuncArg) ([]*timeseries, error) { if err != nil { return nil, err } - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { return tss[:1] } limit := afa.ae.Limit @@ -467,7 +467,7 @@ func aggrFuncShare(afa *aggrFuncArg) ([]*timeseries, error) { if err != nil { return nil, err } - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { for i := range tss[0].Values { // Calculate sum for non-negative points at position i. 
var sum float64 @@ -498,7 +498,7 @@ func aggrFuncZScore(afa *aggrFuncArg) ([]*timeseries, error) { if err != nil { return nil, err } - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { for i := range tss[0].Values { // Calculate avg and stddev for tss points at position i. // See `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation @@ -594,7 +594,7 @@ func aggrFuncCountValues(afa *aggrFuncArg) ([]*timeseries, error) { // Do nothing } - afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) ([]*timeseries, error) { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) ([]*timeseries, error) { m := make(map[float64]*timeseries) for _, ts := range tss { for i, v := range ts.Values { @@ -656,7 +656,7 @@ func newAggrFuncTopK(isReverse bool) aggrFunc { if err != nil { return nil, err } - afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { for n := range tss[0].Values { lessFunc := lessWithNaNs if isReverse { @@ -960,7 +960,7 @@ func aggrFuncOutliersIQR(afa *aggrFuncArg) ([]*timeseries, error) { if err := expectTransformArgsNum(args, 1); err != nil { return nil, err } - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { // Calculate lower and upper bounds for interquartile range per each point across tss // according to Outliers section at https://en.wikipedia.org/wiki/Interquartile_range lower, upper := getPerPointIQRBounds(tss) @@ -1016,7 +1016,7 @@ func aggrFuncOutliersMAD(afa *aggrFuncArg) ([]*timeseries, error) { if err != nil { return nil, err } - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { // Calculate medians for each point across tss. medians := getPerPointMedians(tss) // Calculate MAD values multiplied by tolerance for each point across tss. @@ -1052,7 +1052,7 @@ func aggrFuncOutliersK(afa *aggrFuncArg) ([]*timeseries, error) { if err != nil { return nil, err } - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { // Calculate medians for each point across tss. medians := getPerPointMedians(tss) // Return topK time series with the highest variance from median. @@ -1123,7 +1123,7 @@ func aggrFuncLimitK(afa *aggrFuncArg) ([]*timeseries, error) { if limit < 0 { limit = 0 } - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { // Sort series by metricName hash in order to get consistent set of output series // across multiple calls to limitk() function. // Sort series by hash in order to guarantee uniform selection across series. 
@@ -1187,7 +1187,7 @@ func aggrFuncQuantiles(afa *aggrFuncArg) ([]*timeseries, error) { phis[i] = phisLocal[0] } argOrig := args[len(args)-1] - afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { tssDst := make([]*timeseries, len(phiArgs)) for j := range tssDst { ts := &timeseries{} @@ -1244,7 +1244,7 @@ func aggrFuncMedian(afa *aggrFuncArg) ([]*timeseries, error) { } func newAggrQuantileFunc(phis []float64) func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { - return func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries { + return func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries { dst := tss[0] a := getFloat64s() values := a.A diff --git a/app/vmselect/promql/binary_op.go b/app/vmselect/promql/binary_op.go index 1c56173bd..8ed4503b6 100644 --- a/app/vmselect/promql/binary_op.go +++ b/app/vmselect/promql/binary_op.go @@ -74,7 +74,7 @@ func newBinaryOpCmpFunc(cf func(left, right float64) bool) binaryOpFunc { } func newBinaryOpArithFunc(af func(left, right float64) float64) binaryOpFunc { - afe := func(left, right float64, isBool bool) float64 { + afe := func(left, right float64, _ bool) float64 { return af(left, right) } return newBinaryOpFunc(afe) diff --git a/app/vmselect/promql/exec_test.go b/app/vmselect/promql/exec_test.go index 412ab78b0..d253a9342 100644 --- a/app/vmselect/promql/exec_test.go +++ b/app/vmselect/promql/exec_test.go @@ -210,11 +210,13 @@ func TestExecSuccess(t *testing.T) { f(q, resultExpected) }) t.Run("scalar-string-nonnum", func(t *testing.T) { + t.Parallel() q := `scalar("fooobar")` resultExpected := []netstorage.Result{} f(q, resultExpected) }) t.Run("scalar-string-num", func(t *testing.T) { + t.Parallel() q := `scalar("-12.34")` r := netstorage.Result{ MetricName: metricNameExpected, diff --git a/app/vmselect/promql/rollup.go b/app/vmselect/promql/rollup.go index 4700ba643..47af5c3b7 100644 --- a/app/vmselect/promql/rollup.go +++ b/app/vmselect/promql/rollup.go @@ -371,10 +371,10 @@ func getRollupTag(expr metricsql.Expr) (string, error) { func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start, end, step int64, maxPointsPerSeries int, window, lookbackDelta int64, sharedTimestamps []int64) ( func(values []float64, timestamps []int64), []*rollupConfig, error) { - preFunc := func(values []float64, timestamps []int64) {} + preFunc := func(_ []float64, _ []int64) {} funcName = strings.ToLower(funcName) if rollupFuncsRemoveCounterResets[funcName] { - preFunc = func(values []float64, timestamps []int64) { + preFunc = func(values []float64, _ []int64) { removeCounterResets(values) } } @@ -486,7 +486,7 @@ func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start for _, aggrFuncName := range aggrFuncNames { if rollupFuncsRemoveCounterResets[aggrFuncName] { // There is no need to save the previous preFunc, since it is either empty or the same.
- preFunc = func(values []float64, timestamps []int64) { + preFunc = func(values []float64, _ []int64) { removeCounterResets(values) } } diff --git a/app/vmselect/promql/rollup_result_cache_test.go b/app/vmselect/promql/rollup_result_cache_test.go index 7b11f9799..1ab772d79 100644 --- a/app/vmselect/promql/rollup_result_cache_test.go +++ b/app/vmselect/promql/rollup_result_cache_test.go @@ -10,13 +10,13 @@ import ( ) func TestRollupResultCacheInitStop(t *testing.T) { - t.Run("inmemory", func(t *testing.T) { + t.Run("inmemory", func(_ *testing.T) { for i := 0; i < 5; i++ { InitRollupResultCache("") StopRollupResultCache() } }) - t.Run("file-based", func(t *testing.T) { + t.Run("file-based", func(_ *testing.T) { cacheFilePath := "test-rollup-result-cache" for i := 0; i < 3; i++ { InitRollupResultCache(cacheFilePath) diff --git a/app/vmselect/promql/transform.go b/app/vmselect/promql/transform.go index fadc072cf..335f2a9db 100644 --- a/app/vmselect/promql/transform.go +++ b/app/vmselect/promql/transform.go @@ -918,7 +918,7 @@ func transformHistogramQuantile(tfa *transformFuncArg) ([]*timeseries, error) { m := groupLeTimeseries(tss) // Calculate quantile for each group in m - lastNonInf := func(i int, xss []leTimeseries) float64 { + lastNonInf := func(_ int, xss []leTimeseries) float64 { for len(xss) > 0 { xsLast := xss[len(xss)-1] if !math.IsInf(xsLast.le, 0) { diff --git a/lib/httpserver/httpserver.go b/lib/httpserver/httpserver.go index 45ea74903..6c6958455 100644 --- a/lib/httpserver/httpserver.go +++ b/lib/httpserver/httpserver.go @@ -90,7 +90,7 @@ type RequestHandler func(w http.ResponseWriter, r *http.Request) bool // See https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt func Serve(addrs []string, useProxyProtocol *flagutil.ArrayBool, rh RequestHandler) { if rh == nil { - rh = func(w http.ResponseWriter, r *http.Request) bool { + rh = func(_ http.ResponseWriter, _ *http.Request) bool { return false } } @@ -152,7 +152,7 @@ func serveWithListener(addr string, ln net.Listener, rh RequestHandler) { ErrorLog: logger.StdErrorLogger(), } if *connTimeout > 0 { - s.s.ConnContext = func(ctx context.Context, c net.Conn) context.Context { + s.s.ConnContext = func(ctx context.Context, _ net.Conn) context.Context { timeoutSec := connTimeout.Seconds() // Add a jitter for connection timeout in order to prevent Thundering herd problem // when all the connections are established at the same time. 
diff --git a/lib/logstorage/filters_test.go b/lib/logstorage/filters_test.go index cf7d6e782..a565e3838 100644 --- a/lib/logstorage/filters_test.go +++ b/lib/logstorage/filters_test.go @@ -211,11 +211,11 @@ func TestFilterBitmap(t *testing.T) { }) // Clear all the bits - bm.forEachSetBit(func(idx int) bool { + bm.forEachSetBit(func(_ int) bool { return false }) bitsCount := 0 - bm.forEachSetBit(func(idx int) bool { + bm.forEachSetBit(func(_ int) bool { bitsCount++ return true }) @@ -9226,7 +9226,7 @@ func testFilterMatchForStorage(t *testing.T, s *Storage, tenantID TenantID, f fi resultColumnNames: []string{resultColumnName}, } workersCount := 3 - s.search(workersCount, so, nil, func(workerID uint, br *blockResult) { + s.search(workersCount, so, nil, func(_ uint, br *blockResult) { // Verify tenantID if !br.streamID.tenantID.equal(&tenantID) { t.Fatalf("unexpected tenantID in blockResult; got %s; want %s", &br.streamID.tenantID, &tenantID) diff --git a/lib/logstorage/indexdb_test.go b/lib/logstorage/indexdb_test.go index 02e0951f0..de92fda7c 100644 --- a/lib/logstorage/indexdb_test.go +++ b/lib/logstorage/indexdb_test.go @@ -76,175 +76,174 @@ func TestStorageSearchStreamIDs(t *testing.T) { } } }) - t.Run("missing-job", func(t *testing.T) { - f(`{job="non-existing-job",instance="instance-0"}`, nil) - }) - t.Run("missing-job-re", func(t *testing.T) { - f(`{job=~"non-existing-job|",instance="instance-0"}`, nil) - }) - t.Run("missing-job-negative-re", func(t *testing.T) { - f(`{job!~"job.+",instance="instance-0"}`, nil) - }) - t.Run("empty-job", func(t *testing.T) { - f(`{job="",instance="instance-0"}`, nil) - }) - t.Run("missing-instance", func(t *testing.T) { - f(`{job="job-0",instance="non-existing-instance"}`, nil) - }) - t.Run("missing-instance-re", func(t *testing.T) { - f(`{job="job-0",instance=~"non-existing-instance|"}`, nil) - }) - t.Run("missing-instance-negative-re", func(t *testing.T) { - f(`{job="job-0",instance!~"instance.+"}`, nil) - }) - t.Run("empty-instance", func(t *testing.T) { - f(`{job="job-0",instance=""}`, nil) - }) - t.Run("non-existing-tag", func(t *testing.T) { - f(`{job="job-0",instance="instance-0",non_existing_tag="foobar"}`, nil) - }) - t.Run("non-existing-non-empty-tag", func(t *testing.T) { - f(`{job="job-0",instance="instance-0",non_existing_tag!=""}`, nil) - }) - t.Run("non-existing-tag-re", func(t *testing.T) { - f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo.+"}`, nil) - }) - t.Run("non-existing-non-empty-tag-re", func(t *testing.T) { - f(`{job="job-0",instance="instance-0",non_existing_tag!~""}`, nil) - }) - t.Run("match-job-instance", func(t *testing.T) { + // missing-job + f(`{job="non-existing-job",instance="instance-0"}`, nil) + + // missing-job-re + f(`{job=~"non-existing-job|",instance="instance-0"}`, nil) + + // missing-job-negative-re + f(`{job!~"job.+",instance="instance-0"}`, nil) + + // empty-job + f(`{job="",instance="instance-0"}`, nil) + + // missing-instance + f(`{job="job-0",instance="non-existing-instance"}`, nil) + + // missing-instance-re + f(`{job="job-0",instance=~"non-existing-instance|"}`, nil) + + // missing-instance-negative-re + f(`{job="job-0",instance!~"instance.+"}`, nil) + + // empty-instance + f(`{job="job-0",instance=""}`, nil) + + // non-existing-tag + f(`{job="job-0",instance="instance-0",non_existing_tag="foobar"}`, nil) + + // non-existing-non-empty-tag + f(`{job="job-0",instance="instance-0",non_existing_tag!=""}`, nil) + + // non-existing-tag-re + 
f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo.+"}`, nil) + + //non-existing-non-empty-tag-re + f(`{job="job-0",instance="instance-0",non_existing_tag!~""}`, nil) + + // match-job-instance + sid, _ := getStreamIDForTags(map[string]string{ + "instance": "instance-0", + "job": "job-0", + }) + f(`{job="job-0",instance="instance-0"}`, []streamID{sid}) + + // match-non-existing-tag + sid, _ = getStreamIDForTags(map[string]string{ + "instance": "instance-0", + "job": "job-0", + }) + f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo|"}`, []streamID{sid}) + + // match-job + var streamIDs []streamID + for i := 0; i < instancesCount; i++ { sid, _ := getStreamIDForTags(map[string]string{ - "instance": "instance-0", + "instance": fmt.Sprintf("instance-%d", i), "job": "job-0", }) - f(`{job="job-0",instance="instance-0"}`, []streamID{sid}) - }) - t.Run("match-non-existing-tag", func(t *testing.T) { + streamIDs = append(streamIDs, sid) + } + f(`{job="job-0"}`, streamIDs) + + // match-instance + streamIDs = nil + for i := 0; i < jobsCount; i++ { sid, _ := getStreamIDForTags(map[string]string{ - "instance": "instance-0", - "job": "job-0", + "instance": "instance-1", + "job": fmt.Sprintf("job-%d", i), }) - f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo|"}`, []streamID{sid}) - }) - t.Run("match-job", func(t *testing.T) { - var streamIDs []streamID - for i := 0; i < instancesCount; i++ { + streamIDs = append(streamIDs, sid) + } + f(`{instance="instance-1"}`, streamIDs) + + // match-re + streamIDs = nil + for _, instanceID := range []int{3, 1} { + for _, jobID := range []int{0, 2} { sid, _ := getStreamIDForTags(map[string]string{ - "instance": fmt.Sprintf("instance-%d", i), - "job": "job-0", + "instance": fmt.Sprintf("instance-%d", instanceID), + "job": fmt.Sprintf("job-%d", jobID), }) streamIDs = append(streamIDs, sid) } - f(`{job="job-0"}`, streamIDs) - }) - t.Run("match-instance", func(t *testing.T) { - var streamIDs []streamID - for i := 0; i < jobsCount; i++ { + } + f(`{job=~"job-(0|2)",instance=~"instance-[13]"}`, streamIDs) + + // match-re-empty-match + streamIDs = nil + for _, instanceID := range []int{3, 1} { + for _, jobID := range []int{0, 2} { sid, _ := getStreamIDForTags(map[string]string{ - "instance": "instance-1", - "job": fmt.Sprintf("job-%d", i), + "instance": fmt.Sprintf("instance-%d", instanceID), + "job": fmt.Sprintf("job-%d", jobID), }) streamIDs = append(streamIDs, sid) } - f(`{instance="instance-1"}`, streamIDs) - }) - t.Run("match-re", func(t *testing.T) { - var streamIDs []streamID - for _, instanceID := range []int{3, 1} { - for _, jobID := range []int{0, 2} { - sid, _ := getStreamIDForTags(map[string]string{ - "instance": fmt.Sprintf("instance-%d", instanceID), - "job": fmt.Sprintf("job-%d", jobID), - }) - streamIDs = append(streamIDs, sid) - } + } + f(`{job=~"job-(0|2)|",instance=~"instance-[13]"}`, streamIDs) + + // match-negative-re + var instanceIDs []int + for i := 0; i < instancesCount; i++ { + if i != 0 && i != 1 { + instanceIDs = append(instanceIDs, i) } - f(`{job=~"job-(0|2)",instance=~"instance-[13]"}`, streamIDs) - }) - t.Run("match-re-empty-match", func(t *testing.T) { - var streamIDs []streamID - for _, instanceID := range []int{3, 1} { - for _, jobID := range []int{0, 2} { - sid, _ := getStreamIDForTags(map[string]string{ - "instance": fmt.Sprintf("instance-%d", instanceID), - "job": fmt.Sprintf("job-%d", jobID), - }) - streamIDs = append(streamIDs, sid) - } + } + var jobIDs []int + for i := 0; i < jobsCount; i++ { + if i > 2 { + 
jobIDs = append(jobIDs, i) } - f(`{job=~"job-(0|2)|",instance=~"instance-[13]"}`, streamIDs) - }) - t.Run("match-negative-re", func(t *testing.T) { - var instanceIDs []int - for i := 0; i < instancesCount; i++ { - if i != 0 && i != 1 { - instanceIDs = append(instanceIDs, i) - } + } + streamIDs = nil + for _, instanceID := range instanceIDs { + for _, jobID := range jobIDs { + sid, _ := getStreamIDForTags(map[string]string{ + "instance": fmt.Sprintf("instance-%d", instanceID), + "job": fmt.Sprintf("job-%d", jobID), + }) + streamIDs = append(streamIDs, sid) } - var jobIDs []int - for i := 0; i < jobsCount; i++ { - if i > 2 { - jobIDs = append(jobIDs, i) - } + } + f(`{job!~"job-[0-2]",instance!~"instance-(0|1)"}`, streamIDs) + + // match-negative-re-empty-match + instanceIDs = nil + for i := 0; i < instancesCount; i++ { + if i != 0 && i != 1 { + instanceIDs = append(instanceIDs, i) } - var streamIDs []streamID - for _, instanceID := range instanceIDs { - for _, jobID := range jobIDs { - sid, _ := getStreamIDForTags(map[string]string{ - "instance": fmt.Sprintf("instance-%d", instanceID), - "job": fmt.Sprintf("job-%d", jobID), - }) - streamIDs = append(streamIDs, sid) - } + } + jobIDs = nil + for i := 0; i < jobsCount; i++ { + if i > 2 { + jobIDs = append(jobIDs, i) } - f(`{job!~"job-[0-2]",instance!~"instance-(0|1)"}`, streamIDs) - }) - t.Run("match-negative-re-empty-match", func(t *testing.T) { - var instanceIDs []int - for i := 0; i < instancesCount; i++ { - if i != 0 && i != 1 { - instanceIDs = append(instanceIDs, i) - } + } + streamIDs = nil + for _, instanceID := range instanceIDs { + for _, jobID := range jobIDs { + sid, _ := getStreamIDForTags(map[string]string{ + "instance": fmt.Sprintf("instance-%d", instanceID), + "job": fmt.Sprintf("job-%d", jobID), + }) + streamIDs = append(streamIDs, sid) } - var jobIDs []int - for i := 0; i < jobsCount; i++ { - if i > 2 { - jobIDs = append(jobIDs, i) - } + } + f(`{job!~"job-[0-2]",instance!~"instance-(0|1)|"}`, streamIDs) + + // match-negative-job + instanceIDs = []int{2} + jobIDs = nil + for i := 0; i < jobsCount; i++ { + if i != 1 { + jobIDs = append(jobIDs, i) } - var streamIDs []streamID - for _, instanceID := range instanceIDs { - for _, jobID := range jobIDs { - sid, _ := getStreamIDForTags(map[string]string{ - "instance": fmt.Sprintf("instance-%d", instanceID), - "job": fmt.Sprintf("job-%d", jobID), - }) - streamIDs = append(streamIDs, sid) - } + } + streamIDs = nil + for _, instanceID := range instanceIDs { + for _, jobID := range jobIDs { + sid, _ := getStreamIDForTags(map[string]string{ + "instance": fmt.Sprintf("instance-%d", instanceID), + "job": fmt.Sprintf("job-%d", jobID), + }) + streamIDs = append(streamIDs, sid) } - f(`{job!~"job-[0-2]",instance!~"instance-(0|1)|"}`, streamIDs) - }) - t.Run("match-negative-job", func(t *testing.T) { - instanceIDs := []int{2} - var jobIDs []int - for i := 0; i < jobsCount; i++ { - if i != 1 { - jobIDs = append(jobIDs, i) - } - } - var streamIDs []streamID - for _, instanceID := range instanceIDs { - for _, jobID := range jobIDs { - sid, _ := getStreamIDForTags(map[string]string{ - "instance": fmt.Sprintf("instance-%d", instanceID), - "job": fmt.Sprintf("job-%d", jobID), - }) - streamIDs = append(streamIDs, sid) - } - } - f(`{instance="instance-2",job!="job-1"}`, streamIDs) - }) + } + f(`{instance="instance-2",job!="job-1"}`, streamIDs) mustCloseIndexdb(idb) fs.MustRemoveAll(path) diff --git a/lib/logstorage/storage_search.go b/lib/logstorage/storage_search.go index c728d9998..69059e6f5 100644 --- 
a/lib/logstorage/storage_search.go +++ b/lib/logstorage/storage_search.go @@ -51,7 +51,7 @@ func (s *Storage) RunQuery(tenantIDs []TenantID, q *Query, stopCh <-chan struct{ resultColumnNames: resultColumnNames, } workersCount := cgroup.AvailableCPUs() - s.search(workersCount, so, stopCh, func(workerID uint, br *blockResult) { + s.search(workersCount, so, stopCh, func(_ uint, br *blockResult) { brs := getBlockRows() cs := brs.cs diff --git a/lib/logstorage/storage_search_test.go b/lib/logstorage/storage_search_test.go index adc29a4bd..886ec56fe 100644 --- a/lib/logstorage/storage_search_test.go +++ b/lib/logstorage/storage_search_test.go @@ -78,25 +78,25 @@ func TestStorageRunQuery(t *testing.T) { s.debugFlush() // run tests on the storage data - t.Run("missing-tenant", func(t *testing.T) { + t.Run("missing-tenant", func(_ *testing.T) { q := mustParseQuery(`"log message"`) tenantID := TenantID{ AccountID: 0, ProjectID: 0, } - processBlock := func(columns []BlockColumn) { + processBlock := func(_ []BlockColumn) { panic(fmt.Errorf("unexpected match")) } tenantIDs := []TenantID{tenantID} s.RunQuery(tenantIDs, q, nil, processBlock) }) - t.Run("missing-message-text", func(t *testing.T) { + t.Run("missing-message-text", func(_ *testing.T) { q := mustParseQuery(`foobar`) tenantID := TenantID{ AccountID: 1, ProjectID: 11, } - processBlock := func(columns []BlockColumn) { + processBlock := func(_ []BlockColumn) { panic(fmt.Errorf("unexpected match")) } tenantIDs := []TenantID{tenantID} @@ -168,9 +168,9 @@ func TestStorageRunQuery(t *testing.T) { t.Fatalf("unexpected number of matching rows; got %d; want %d", n, expectedRowsCount) } }) - t.Run("stream-filter-mismatch", func(t *testing.T) { + t.Run("stream-filter-mismatch", func(_ *testing.T) { q := mustParseQuery(`_stream:{job="foobar",instance=~"host-.+:2345"} log`) - processBlock := func(columns []BlockColumn) { + processBlock := func(_ []BlockColumn) { panic(fmt.Errorf("unexpected match")) } s.RunQuery(allTenantIDs, q, nil, processBlock) @@ -273,7 +273,7 @@ func TestStorageRunQuery(t *testing.T) { t.Fatalf("unexpected number of rows; got %d; want %d", n, expectedRowsCount) } }) - t.Run("matching-stream-id-missing-time-range", func(t *testing.T) { + t.Run("matching-stream-id-missing-time-range", func(_ *testing.T) { minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9 maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9 q := mustParseQuery(fmt.Sprintf(`_stream:{job="foobar",instance="host-1:234"} _time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9)) @@ -281,13 +281,13 @@ func TestStorageRunQuery(t *testing.T) { AccountID: 1, ProjectID: 11, } - processBlock := func(columns []BlockColumn) { + processBlock := func(_ []BlockColumn) { panic(fmt.Errorf("unexpected match")) } tenantIDs := []TenantID{tenantID} s.RunQuery(tenantIDs, q, nil, processBlock) }) - t.Run("missing-time-range", func(t *testing.T) { + t.Run("missing-time-range", func(_ *testing.T) { minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9 maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9 q := mustParseQuery(fmt.Sprintf(`_time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9)) @@ -295,7 +295,7 @@ func TestStorageRunQuery(t *testing.T) { AccountID: 1, ProjectID: 11, } - processBlock := func(columns []BlockColumn) { + processBlock := func(_ []BlockColumn) { panic(fmt.Errorf("unexpected match")) } tenantIDs := []TenantID{tenantID} @@ -392,7 +392,7 @@ func TestStorageSearch(t *testing.T) { } } - t.Run("missing-tenant-smaller-than-existing", func(t *testing.T) { + 
t.Run("missing-tenant-smaller-than-existing", func(_ *testing.T) { tenantID := TenantID{ AccountID: 0, ProjectID: 0, @@ -405,12 +405,12 @@ func TestStorageSearch(t *testing.T) { filter: f, resultColumnNames: []string{"_msg"}, } - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, _ *blockResult) { panic(fmt.Errorf("unexpected match")) } s.search(workersCount, so, nil, processBlock) }) - t.Run("missing-tenant-bigger-than-existing", func(t *testing.T) { + t.Run("missing-tenant-bigger-than-existing", func(_ *testing.T) { tenantID := TenantID{ AccountID: tenantsCount + 1, ProjectID: 0, @@ -423,12 +423,12 @@ func TestStorageSearch(t *testing.T) { filter: f, resultColumnNames: []string{"_msg"}, } - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, _ *blockResult) { panic(fmt.Errorf("unexpected match")) } s.search(workersCount, so, nil, processBlock) }) - t.Run("missing-tenant-middle", func(t *testing.T) { + t.Run("missing-tenant-middle", func(_ *testing.T) { tenantID := TenantID{ AccountID: 1, ProjectID: 0, @@ -441,7 +441,7 @@ func TestStorageSearch(t *testing.T) { filter: f, resultColumnNames: []string{"_msg"}, } - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, _ *blockResult) { panic(fmt.Errorf("unexpected match")) } s.search(workersCount, so, nil, processBlock) @@ -461,7 +461,7 @@ func TestStorageSearch(t *testing.T) { resultColumnNames: []string{"_msg"}, } var rowsCount atomic.Uint32 - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, br *blockResult) { if !br.streamID.tenantID.equal(&tenantID) { panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) } @@ -485,7 +485,7 @@ func TestStorageSearch(t *testing.T) { resultColumnNames: []string{"_msg"}, } var rowsCount atomic.Uint32 - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, br *blockResult) { rowsCount.Add(uint32(br.RowsCount())) } s.search(workersCount, so, nil, processBlock) @@ -495,7 +495,7 @@ func TestStorageSearch(t *testing.T) { t.Fatalf("unexpected number of matching rows; got %d; want %d", n, expectedRowsCount) } }) - t.Run("stream-filter-mismatch", func(t *testing.T) { + t.Run("stream-filter-mismatch", func(_ *testing.T) { sf := mustNewStreamFilter(`{job="foobar",instance=~"host-.+:2345"}`) minTimestamp := baseTimestamp maxTimestamp := baseTimestamp + rowsPerBlock*1e9 + blocksPerStream @@ -505,7 +505,7 @@ func TestStorageSearch(t *testing.T) { filter: f, resultColumnNames: []string{"_msg"}, } - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, _ *blockResult) { panic(fmt.Errorf("unexpected match")) } s.search(workersCount, so, nil, processBlock) @@ -526,7 +526,7 @@ func TestStorageSearch(t *testing.T) { resultColumnNames: []string{"_msg"}, } var rowsCount atomic.Uint32 - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, br *blockResult) { if !br.streamID.tenantID.equal(&tenantID) { panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) } @@ -555,7 +555,7 @@ func TestStorageSearch(t *testing.T) { resultColumnNames: []string{"_msg"}, } var rowsCount atomic.Uint32 - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, br *blockResult) { if !br.streamID.tenantID.equal(&tenantID) { panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) } @@ 
-592,7 +592,7 @@ func TestStorageSearch(t *testing.T) { resultColumnNames: []string{"_msg"}, } var rowsCount atomic.Uint32 - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, br *blockResult) { if !br.streamID.tenantID.equal(&tenantID) { panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) } @@ -620,7 +620,7 @@ func TestStorageSearch(t *testing.T) { resultColumnNames: []string{"_msg"}, } var rowsCount atomic.Uint32 - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, br *blockResult) { rowsCount.Add(uint32(br.RowsCount())) } s.search(workersCount, so, nil, processBlock) @@ -630,7 +630,7 @@ func TestStorageSearch(t *testing.T) { t.Fatalf("unexpected number of rows; got %d; want %d", n, expectedRowsCount) } }) - t.Run("matching-stream-id-missing-time-range", func(t *testing.T) { + t.Run("matching-stream-id-missing-time-range", func(_ *testing.T) { sf := mustNewStreamFilter(`{job="foobar",instance="host-1:234"}`) tenantID := TenantID{ AccountID: 1, @@ -644,7 +644,7 @@ func TestStorageSearch(t *testing.T) { filter: f, resultColumnNames: []string{"_msg"}, } - processBlock := func(workerID uint, br *blockResult) { + processBlock := func(_ uint, _ *blockResult) { panic(fmt.Errorf("unexpected match")) } s.search(workersCount, so, nil, processBlock) diff --git a/lib/netutil/tls.go b/lib/netutil/tls.go index 03cc671ae..e1e0ccf57 100644 --- a/lib/netutil/tls.go +++ b/lib/netutil/tls.go @@ -32,7 +32,7 @@ func GetServerTLSConfig(tlsCertFile, tlsKeyFile, tlsMinVersion string, tlsCipher MinVersion: minVersion, // Do not set MaxVersion, since this has no sense from security PoV. // This can only result in lower security level if improperly set. - GetCertificate: func(info *tls.ClientHelloInfo) (*tls.Certificate, error) { + GetCertificate: func(_ *tls.ClientHelloInfo) (*tls.Certificate, error) { certLock.Lock() defer certLock.Unlock() if fasttime.UnixTimestamp() > certDeadline { diff --git a/lib/persistentqueue/persistentqueue_test.go b/lib/persistentqueue/persistentqueue_test.go index 2099d9e75..b518d625a 100644 --- a/lib/persistentqueue/persistentqueue_test.go +++ b/lib/persistentqueue/persistentqueue_test.go @@ -22,7 +22,7 @@ func TestQueueOpenClose(t *testing.T) { } func TestQueueOpen(t *testing.T) { - t.Run("invalid-metainfo", func(t *testing.T) { + t.Run("invalid-metainfo", func(_ *testing.T) { path := "queue-open-invalid-metainfo" mustCreateDir(path) mustCreateFile(filepath.Join(path, metainfoFilename), "foobarbaz") @@ -30,7 +30,7 @@ func TestQueueOpen(t *testing.T) { q.MustClose() mustDeleteDir(path) }) - t.Run("junk-files-and-dirs", func(t *testing.T) { + t.Run("junk-files-and-dirs", func(_ *testing.T) { path := "queue-open-junk-files-and-dir" mustCreateDir(path) mustCreateEmptyMetainfo(path, "foobar") @@ -40,7 +40,7 @@ func TestQueueOpen(t *testing.T) { q.MustClose() mustDeleteDir(path) }) - t.Run("invalid-chunk-offset", func(t *testing.T) { + t.Run("invalid-chunk-offset", func(_ *testing.T) { path := "queue-open-invalid-chunk-offset" mustCreateDir(path) mustCreateEmptyMetainfo(path, "foobar") @@ -49,7 +49,7 @@ func TestQueueOpen(t *testing.T) { q.MustClose() mustDeleteDir(path) }) - t.Run("too-new-chunk", func(t *testing.T) { + t.Run("too-new-chunk", func(_ *testing.T) { path := "queue-open-too-new-chunk" mustCreateDir(path) mustCreateEmptyMetainfo(path, "foobar") @@ -88,7 +88,7 @@ func TestQueueOpen(t *testing.T) { q.MustClose() mustDeleteDir(path) }) - t.Run("metainfo-dir", func(t 
*testing.T) { + t.Run("metainfo-dir", func(_ *testing.T) { path := "queue-open-metainfo-dir" mustCreateDir(path) mustCreateDir(filepath.Join(path, metainfoFilename)) @@ -112,7 +112,7 @@ func TestQueueOpen(t *testing.T) { q.MustClose() mustDeleteDir(path) }) - t.Run("invalid-writer-file-size", func(t *testing.T) { + t.Run("invalid-writer-file-size", func(_ *testing.T) { path := "too-small-reader-file" mustCreateDir(path) mustCreateEmptyMetainfo(path, "foobar") diff --git a/lib/promrelabel/config_test.go b/lib/promrelabel/config_test.go index 8d4f4340d..539efaa1d 100644 --- a/lib/promrelabel/config_test.go +++ b/lib/promrelabel/config_test.go @@ -101,12 +101,12 @@ func TestLoadRelabelConfigsFailure(t *testing.T) { t.Fatalf("unexpected non-empty rcs: %#v", rcs) } } - t.Run("non-existing-file", func(t *testing.T) { - f("testdata/non-exsiting-file") - }) - t.Run("invalid-file", func(t *testing.T) { - f("testdata/invalid_config.yml") - }) + + // non-existing-file + f("testdata/non-exsiting-file") + + // invalid-file + f("testdata/invalid_config.yml") } func TestParsedConfigsString(t *testing.T) { @@ -209,410 +209,410 @@ func TestParseRelabelConfigsFailure(t *testing.T) { t.Fatalf("unexpected non-empty pcs: %#v", pcs) } } - t.Run("invalid-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - SourceLabels: []string{"aaa"}, - TargetLabel: "xxx", - Regex: &MultiLineRegex{ - S: "foo[bar", - }, + + // invalid regex + f([]RelabelConfig{ + { + SourceLabels: []string{"aaa"}, + TargetLabel: "xxx", + Regex: &MultiLineRegex{ + S: "foo[bar", }, - }) + }, }) - t.Run("replace-missing-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "replace", - SourceLabels: []string{"foo"}, + + // replace-missing-target-label + f([]RelabelConfig{ + { + Action: "replace", + SourceLabels: []string{"foo"}, + }, + }) + + // replace_all-missing-source-labels + f([]RelabelConfig{ + { + Action: "replace_all", + TargetLabel: "xxx", + }, + }) + + // replace_all-missing-target-label + f([]RelabelConfig{ + { + Action: "replace_all", + SourceLabels: []string{"foo"}, + }, + }) + + // keep-missing-source-labels + f([]RelabelConfig{ + { + Action: "keep", + }, + }) + + // keep_if_contains-missing-target-label + f([]RelabelConfig{ + { + Action: "keep_if_contains", + SourceLabels: []string{"foo"}, + }, + }) + + // keep_if_contains-missing-source-labels + f([]RelabelConfig{ + { + Action: "keep_if_contains", + TargetLabel: "foo", + }, + }) + + // keep_if_contains-unused-regex + f([]RelabelConfig{ + { + Action: "keep_if_contains", + TargetLabel: "foo", + SourceLabels: []string{"bar"}, + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("replace_all-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "replace_all", - TargetLabel: "xxx", + + // drop_if_contains-missing-target-label + f([]RelabelConfig{ + { + Action: "drop_if_contains", + SourceLabels: []string{"foo"}, + }, + }) + + // drop_if_contains-missing-source-labels + f([]RelabelConfig{ + { + Action: "drop_if_contains", + TargetLabel: "foo", + }, + }) + + // drop_if_contains-unused-regex + f([]RelabelConfig{ + { + Action: "drop_if_contains", + TargetLabel: "foo", + SourceLabels: []string{"bar"}, + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("replace_all-missing-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "replace_all", - SourceLabels: []string{"foo"}, + + // keep_if_equal-missing-source-labels + f([]RelabelConfig{ + { + Action: "keep_if_equal", + }, + }) + + // 
keep_if_equal-single-source-label + f([]RelabelConfig{ + { + Action: "keep_if_equal", + SourceLabels: []string{"foo"}, + }, + }) + + // keep_if_equal-unused-target-label + f([]RelabelConfig{ + { + Action: "keep_if_equal", + SourceLabels: []string{"foo", "bar"}, + TargetLabel: "foo", + }, + }) + + // keep_if_equal-unused-regex + f([]RelabelConfig{ + { + Action: "keep_if_equal", + SourceLabels: []string{"foo", "bar"}, + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("keep-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep", + + // drop_if_equal-missing-source-labels + f([]RelabelConfig{ + { + Action: "drop_if_equal", + }, + }) + + // drop_if_equal-single-source-label + f([]RelabelConfig{ + { + Action: "drop_if_equal", + SourceLabels: []string{"foo"}, + }, + }) + + // drop_if_equal-unused-target-label + f([]RelabelConfig{ + { + Action: "drop_if_equal", + SourceLabels: []string{"foo", "bar"}, + TargetLabel: "foo", + }, + }) + + // drop_if_equal-unused-regex + f([]RelabelConfig{ + { + Action: "drop_if_equal", + SourceLabels: []string{"foo", "bar"}, + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("keep_if_contains-missing-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_if_contains", - SourceLabels: []string{"foo"}, + + // keepequal-missing-source-labels + f([]RelabelConfig{ + { + Action: "keepequal", + }, + }) + + // keepequal-missing-target-label + f([]RelabelConfig{ + { + Action: "keepequal", + SourceLabels: []string{"foo"}, + }, + }) + + // keepequal-unused-regex + f([]RelabelConfig{ + { + Action: "keepequal", + SourceLabels: []string{"foo"}, + TargetLabel: "foo", + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("keep_if_contains-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_if_contains", - TargetLabel: "foo", + + // dropequal-missing-source-labels + f([]RelabelConfig{ + { + Action: "dropequal", + }, + }) + + // dropequal-missing-target-label + f([]RelabelConfig{ + { + Action: "dropequal", + SourceLabels: []string{"foo"}, + }, + }) + + // dropequal-unused-regex + f([]RelabelConfig{ + { + Action: "dropequal", + SourceLabels: []string{"foo"}, + TargetLabel: "foo", + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("keep_if_contains-unused-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_if_contains", - TargetLabel: "foo", - SourceLabels: []string{"bar"}, - Regex: &MultiLineRegex{ - S: "bar", - }, + + // drop-missing-source-labels + f([]RelabelConfig{ + { + Action: "drop", + }, + }) + + // hashmod-missing-source-labels + f([]RelabelConfig{ + { + Action: "hashmod", + TargetLabel: "aaa", + Modulus: 123, + }, + }) + + // hashmod-missing-target-label + f([]RelabelConfig{ + { + Action: "hashmod", + SourceLabels: []string{"aaa"}, + Modulus: 123, + }, + }) + + // hashmod-missing-modulus + f([]RelabelConfig{ + { + Action: "hashmod", + SourceLabels: []string{"aaa"}, + TargetLabel: "xxx", + }, + }) + + // invalid-action + f([]RelabelConfig{ + { + Action: "invalid-action", + }, + }) + + // drop_metrics-missing-regex + f([]RelabelConfig{ + { + Action: "drop_metrics", + }, + }) + + // drop_metrics-non-empty-source-labels + f([]RelabelConfig{ + { + Action: "drop_metrics", + SourceLabels: []string{"foo"}, + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("drop_if_contains-missing-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_if_contains", - SourceLabels: []string{"foo"}, + + // 
keep_metrics-missing-regex + f([]RelabelConfig{ + { + Action: "keep_metrics", + }, + }) + + // keep_metrics-non-empty-source-labels + f([]RelabelConfig{ + { + Action: "keep_metrics", + SourceLabels: []string{"foo"}, + Regex: &MultiLineRegex{ + S: "bar", }, - }) + }, }) - t.Run("drop_if_contains-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_if_contains", - TargetLabel: "foo", + + // uppercase-missing-sourceLabels + f([]RelabelConfig{ + { + Action: "uppercase", + TargetLabel: "foobar", + }, + }) + + // lowercase-missing-targetLabel + f([]RelabelConfig{ + { + Action: "lowercase", + SourceLabels: []string{"foobar"}, + }, + }) + + // graphite-missing-match + f([]RelabelConfig{ + { + Action: "graphite", + Labels: map[string]string{ + "foo": "bar", }, - }) + }, }) - t.Run("drop_if_contains-unused-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_if_contains", - TargetLabel: "foo", - SourceLabels: []string{"bar"}, - Regex: &MultiLineRegex{ - S: "bar", - }, + + // graphite-missing-labels + f([]RelabelConfig{ + { + Action: "graphite", + Match: "foo.*.bar", + }, + }) + + // graphite-superflouous-sourceLabels + f([]RelabelConfig{ + { + Action: "graphite", + Match: "foo.*.bar", + Labels: map[string]string{ + "foo": "bar", }, - }) + SourceLabels: []string{"foo"}, + }, }) - t.Run("keep_if_equal-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_if_equal", + + // graphite-superflouous-targetLabel + f([]RelabelConfig{ + { + Action: "graphite", + Match: "foo.*.bar", + Labels: map[string]string{ + "foo": "bar", }, - }) - }) - t.Run("keep_if_equal-single-source-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_if_equal", - SourceLabels: []string{"foo"}, - }, - }) - }) - t.Run("keep_if_equal-unused-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_if_equal", - SourceLabels: []string{"foo", "bar"}, - TargetLabel: "foo", - }, - }) - }) - t.Run("keep_if_equal-unused-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_if_equal", - SourceLabels: []string{"foo", "bar"}, - Regex: &MultiLineRegex{ - S: "bar", - }, - }, - }) - }) - t.Run("drop_if_equal-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_if_equal", - }, - }) - }) - t.Run("drop_if_equal-single-source-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_if_equal", - SourceLabels: []string{"foo"}, - }, - }) - }) - t.Run("drop_if_equal-unused-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_if_equal", - SourceLabels: []string{"foo", "bar"}, - TargetLabel: "foo", - }, - }) - }) - t.Run("drop_if_equal-unused-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_if_equal", - SourceLabels: []string{"foo", "bar"}, - Regex: &MultiLineRegex{ - S: "bar", - }, - }, - }) - }) - t.Run("keepequal-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keepequal", - }, - }) - }) - t.Run("keepequal-missing-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keepequal", - SourceLabels: []string{"foo"}, - }, - }) - }) - t.Run("keepequal-unused-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keepequal", - SourceLabels: []string{"foo"}, - TargetLabel: "foo", - Regex: &MultiLineRegex{ - S: "bar", - }, - }, - }) - }) - t.Run("dropequal-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "dropequal", - }, - }) - }) - 
t.Run("dropequal-missing-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "dropequal", - SourceLabels: []string{"foo"}, - }, - }) - }) - t.Run("dropequal-unused-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "dropequal", - SourceLabels: []string{"foo"}, - TargetLabel: "foo", - Regex: &MultiLineRegex{ - S: "bar", - }, - }, - }) - }) - t.Run("drop-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop", - }, - }) - }) - t.Run("hashmod-missing-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "hashmod", - TargetLabel: "aaa", - Modulus: 123, - }, - }) - }) - t.Run("hashmod-missing-target-label", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "hashmod", - SourceLabels: []string{"aaa"}, - Modulus: 123, - }, - }) - }) - t.Run("hashmod-missing-modulus", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "hashmod", - SourceLabels: []string{"aaa"}, - TargetLabel: "xxx", - }, - }) - }) - t.Run("invalid-action", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "invalid-action", - }, - }) - }) - t.Run("drop_metrics-missing-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_metrics", - }, - }) - }) - t.Run("drop_metrics-non-empty-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "drop_metrics", - SourceLabels: []string{"foo"}, - Regex: &MultiLineRegex{ - S: "bar", - }, - }, - }) - }) - t.Run("keep_metrics-missing-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_metrics", - }, - }) - }) - t.Run("keep_metrics-non-empty-source-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "keep_metrics", - SourceLabels: []string{"foo"}, - Regex: &MultiLineRegex{ - S: "bar", - }, - }, - }) - }) - t.Run("uppercase-missing-sourceLabels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "uppercase", - TargetLabel: "foobar", - }, - }) - }) - t.Run("lowercase-missing-targetLabel", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "lowercase", - SourceLabels: []string{"foobar"}, - }, - }) - }) - t.Run("graphite-missing-match", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "graphite", - Labels: map[string]string{ - "foo": "bar", - }, - }, - }) - }) - t.Run("graphite-missing-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "graphite", - Match: "foo.*.bar", - }, - }) - }) - t.Run("graphite-superflouous-sourceLabels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "graphite", - Match: "foo.*.bar", - Labels: map[string]string{ - "foo": "bar", - }, - SourceLabels: []string{"foo"}, - }, - }) - }) - t.Run("graphite-superflouous-targetLabel", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "graphite", - Match: "foo.*.bar", - Labels: map[string]string{ - "foo": "bar", - }, - TargetLabel: "foo", - }, - }) + TargetLabel: "foo", + }, }) + + // graphite-superflouous-replacement replacement := "foo" - t.Run("graphite-superflouous-replacement", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "graphite", - Match: "foo.*.bar", - Labels: map[string]string{ - "foo": "bar", - }, - Replacement: &replacement, + f([]RelabelConfig{ + { + Action: "graphite", + Match: "foo.*.bar", + Labels: map[string]string{ + "foo": "bar", }, - }) + Replacement: &replacement, + }, }) + + // graphite-superflouous-regex var re MultiLineRegex - t.Run("graphite-superflouous-regex", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "graphite", - Match: "foo.*.bar", - Labels: map[string]string{ - "foo": "bar", - }, - 
Regex: &re, + f([]RelabelConfig{ + { + Action: "graphite", + Match: "foo.*.bar", + Labels: map[string]string{ + "foo": "bar", }, - }) + Regex: &re, + }, }) - t.Run("non-graphite-superflouos-match", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "uppercase", - SourceLabels: []string{"foo"}, - TargetLabel: "foo", - Match: "aaa", - }, - }) + + // non-graphite-superflouos-match + f([]RelabelConfig{ + { + Action: "uppercase", + SourceLabels: []string{"foo"}, + TargetLabel: "foo", + Match: "aaa", + }, }) - t.Run("non-graphite-superflouos-labels", func(t *testing.T) { - f([]RelabelConfig{ - { - Action: "uppercase", - SourceLabels: []string{"foo"}, - TargetLabel: "foo", - Labels: map[string]string{ - "foo": "Bar", - }, + + // non-graphite-superflouos-labels + f([]RelabelConfig{ + { + Action: "uppercase", + SourceLabels: []string{"foo"}, + TargetLabel: "foo", + Labels: map[string]string{ + "foo": "Bar", }, - }) + }, }) } diff --git a/lib/promrelabel/relabel_test.go b/lib/promrelabel/relabel_test.go index bd2963a51..8076b0724 100644 --- a/lib/promrelabel/relabel_test.go +++ b/lib/promrelabel/relabel_test.go @@ -181,146 +181,147 @@ func TestParsedRelabelConfigsApply(t *testing.T) { t.Fatalf("unexpected result; got\n%s\nwant\n%s", result, resultExpected) } } - t.Run("empty_relabel_configs", func(t *testing.T) { - f("", `{}`, false, `{}`) - f("", `{}`, true, `{}`) - f("", `{foo="bar"}`, false, `{foo="bar"}`) - f("", `xxx{foo="bar",__aaa="yyy"}`, false, `xxx{__aaa="yyy",foo="bar"}`) - f("", `xxx{foo="bar",__aaa="yyy"}`, true, `xxx{foo="bar"}`) - }) - t.Run("replace-miss", func(t *testing.T) { - f(` + + // empty_relabel_configs + f("", `{}`, false, `{}`) + f("", `{}`, true, `{}`) + f("", `{foo="bar"}`, false, `{foo="bar"}`) + f("", `xxx{foo="bar",__aaa="yyy"}`, false, `xxx{__aaa="yyy",foo="bar"}`) + f("", `xxx{foo="bar",__aaa="yyy"}`, true, `xxx{foo="bar"}`) + + // replace-miss" + f(` - action: replace target_label: bar `, `{}`, false, `{}`) - f(` + f(` - action: replace source_labels: ["foo"] target_label: bar `, `{}`, false, `{}`) - f(` + f(` - action: replace source_labels: ["foo"] target_label: "bar" `, `{xxx="yyy"}`, false, `{xxx="yyy"}`) - f(` + f(` - action: replace source_labels: ["foo"] target_label: "bar" regex: ".+" `, `{xxx="yyy"}`, false, `{xxx="yyy"}`) - f(` + f(` - action: replace source_labels: ["foo"] target_label: "xxx" regex: ".+" `, `{xxx="yyy"}`, false, `{xxx="yyy"}`) - }) - t.Run("replace-if-miss", func(t *testing.T) { - f(` + + // replace-if-miss + f(` - action: replace if: '{foo="bar"}' source_labels: ["xxx", "foo"] target_label: "bar" replacement: "a-$1-b" `, `{xxx="yyy"}`, false, `{xxx="yyy"}`) - }) - t.Run("replace-hit", func(t *testing.T) { - f(` + + // replace-hit + f(` - action: replace source_labels: ["xxx", "foo"] target_label: "bar" replacement: "a-$1-b" `, `{xxx="yyy"}`, false, `{bar="a-yyy;-b",xxx="yyy"}`) - f(` + f(` - action: replace source_labels: ["xxx", "foo"] target_label: "xxx" `, `{xxx="yyy"}`, false, `{xxx="yyy;"}`) - f(` + f(` - action: replace source_labels: ["foo"] target_label: "xxx" `, `{xxx="yyy"}`, false, `{}`) - }) - t.Run("replace-if-hit", func(t *testing.T) { - f(` + + // replace-if-hit + f(` - action: replace if: '{xxx=~".y."}' source_labels: ["xxx", "foo"] target_label: "bar" replacement: "a-$1-b" `, `{xxx="yyy"}`, false, `{bar="a-yyy;-b",xxx="yyy"}`) - }) - t.Run("replace-remove-label-value-hit", func(t *testing.T) { - f(` + + // replace-remove-label-value-hit + f(` - action: replace source_labels: ["foo"] target_label: "foo" regex: "xxx" 
replacement: "" `, `{foo="xxx",bar="baz"}`, false, `{bar="baz"}`) - }) - t.Run("replace-remove-label-value-miss", func(t *testing.T) { - f(` + + // replace-remove-label-value-miss + f(` - action: replace source_labels: ["foo"] target_label: "foo" regex: "xxx" replacement: "" `, `{foo="yyy",bar="baz"}`, false, `{bar="baz",foo="yyy"}`) - }) - t.Run("replace-hit-remove-label", func(t *testing.T) { - f(` + + // replace-hit-remove-label + f(` - action: replace source_labels: ["xxx", "foo"] regex: "yyy;.+" target_label: "foo" replacement: "" `, `{xxx="yyy",foo="bar"}`, false, `{xxx="yyy"}`) - }) - t.Run("replace-miss-remove-label", func(t *testing.T) { - f(` + + // replace-miss-remove-label + f(` - action: replace source_labels: ["xxx", "foo"] regex: "yyy;.+" target_label: "foo" replacement: "" `, `{xxx="yyyz",foo="bar"}`, false, `{foo="bar",xxx="yyyz"}`) - }) - t.Run("replace-hit-target-label-with-capture-group", func(t *testing.T) { - f(` + + // replace-hit-target-label-with-capture-group + f(` - action: replace source_labels: ["xxx", "foo"] target_label: "bar-$1" replacement: "a-$1-b" `, `{xxx="yyy"}`, false, `{bar-yyy;="a-yyy;-b",xxx="yyy"}`) - }) - t.Run("replace_all-miss", func(t *testing.T) { - f(` + + // replace_all-miss + f(` - action: replace_all source_labels: [foo] target_label: "bar" `, `{}`, false, `{}`) - f(` + f(` - action: replace_all source_labels: ["foo"] target_label: "bar" `, `{}`, false, `{}`) - f(` + f(` - action: replace_all source_labels: ["foo"] target_label: "bar" `, `{xxx="yyy"}`, false, `{xxx="yyy"}`) - f(` + f(` - action: replace_all source_labels: ["foo"] target_label: "bar" regex: ".+" `, `{xxx="yyy"}`, false, `{xxx="yyy"}`) - }) - t.Run("replace_all-if-miss", func(t *testing.T) { - f(` + + // replace_all-if-miss + f(` - action: replace_all if: 'foo' source_labels: ["xxx"] @@ -328,18 +329,18 @@ func TestParsedRelabelConfigsApply(t *testing.T) { regex: "-" replacement: "." `, `{xxx="a-b-c"}`, false, `{xxx="a-b-c"}`) - }) - t.Run("replace_all-hit", func(t *testing.T) { - f(` + + // replace_all-hit + f(` - action: replace_all source_labels: ["xxx"] target_label: "xxx" regex: "-" replacement: "." `, `{xxx="a-b-c"}`, false, `{xxx="a.b.c"}`) - }) - t.Run("replace_all-if-hit", func(t *testing.T) { - f(` + + // replace_all-if-hit + f(` - action: replace_all if: '{non_existing_label=~".*"}' source_labels: ["xxx"] @@ -347,18 +348,18 @@ func TestParsedRelabelConfigsApply(t *testing.T) { regex: "-" replacement: "." 
`, `{xxx="a-b-c"}`, false, `{xxx="a.b.c"}`) - }) - t.Run("replace_all-regex-hit", func(t *testing.T) { - f(` + + // replace_all-regex-hit + f(` - action: replace_all source_labels: ["xxx", "foo"] target_label: "xxx" regex: "(;)" replacement: "-$1-" `, `{xxx="y;y"}`, false, `{xxx="y-;-y-;-"}`) - }) - t.Run("replace-add-multi-labels", func(t *testing.T) { - f(` + + // replace-add-multi-labels + f(` - action: replace source_labels: ["xxx"] target_label: "bar" @@ -368,516 +369,522 @@ func TestParsedRelabelConfigsApply(t *testing.T) { target_label: "zar" replacement: "b-$1" `, `{xxx="yyy",instance="a.bc"}`, true, `{bar="a-yyy",instance="a.bc",xxx="yyy",zar="b-a-yyy"}`) - }) - t.Run("replace-self", func(t *testing.T) { - f(` + + // replace-self + f(` - action: replace source_labels: ["foo"] target_label: "foo" replacement: "a-$1" `, `{foo="aaxx"}`, true, `{foo="a-aaxx"}`) - }) - t.Run("replace-missing-source", func(t *testing.T) { - f(` + + // replace-missing-source + f(` - action: replace target_label: foo replacement: "foobar" `, `{}`, true, `{foo="foobar"}`) - }) - t.Run("keep_if_contains-non-existing-target-and-source", func(t *testing.T) { - f(` + + // keep_if_contains-non-existing-target-and-source + f(` - action: keep_if_contains target_label: foo source_labels: [bar] `, `{x="y"}`, true, `{x="y"}`) - }) - t.Run("keep_if_contains-non-existing-target", func(t *testing.T) { - f(` + + // keep_if_contains-non-existing-target + f(` - action: keep_if_contains target_label: foo source_labels: [bar] `, `{bar="aaa"}`, true, `{}`) - }) - t.Run("keep_if_contains-non-existing-source", func(t *testing.T) { - f(` + + // keep_if_contains-non-existing-source + f(` - action: keep_if_contains target_label: foo source_labels: [bar] `, `{foo="aaa"}`, true, `{foo="aaa"}`) - }) - t.Run("keep_if_contains-matching-source-target", func(t *testing.T) { - f(` + + // keep_if_contains-matching-source-target + f(` - action: keep_if_contains target_label: foo source_labels: [bar] `, `{bar="aaa",foo="aaa"}`, true, `{bar="aaa",foo="aaa"}`) - }) - t.Run("keep_if_contains-matching-sources-target", func(t *testing.T) { - f(` + + // keep_if_contains-matching-sources-target + f(` - action: keep_if_contains target_label: foo source_labels: [bar, baz] `, `{bar="aaa",foo="aaa",baz="aaa"}`, true, `{bar="aaa",baz="aaa",foo="aaa"}`) - }) - t.Run("keep_if_contains-mismatching-source-target", func(t *testing.T) { - f(` + + // keep_if_contains-mismatching-source-target + f(` - action: keep_if_contains target_label: foo source_labels: [bar] `, `{bar="aaa",foo="bbb"}`, true, `{}`) - }) - t.Run("keep_if_contains-mismatching-sources-target", func(t *testing.T) { - f(` + + // keep_if_contains-mismatching-sources-target + f(` - action: keep_if_contains target_label: foo source_labels: [bar, baz] `, `{bar="aaa",foo="aaa",baz="bbb"}`, true, `{}`) - }) - t.Run("drop_if_contains-non-existing-target-and-source", func(t *testing.T) { - f(` + + // drop_if_contains-non-existing-target-and-source + f(` - action: drop_if_contains target_label: foo source_labels: [bar] `, `{x="y"}`, true, `{}`) - }) - t.Run("drop_if_contains-non-existing-target", func(t *testing.T) { - f(` + + // drop_if_contains-non-existing-target + f(` - action: drop_if_contains target_label: foo source_labels: [bar] `, `{bar="aaa"}`, true, `{bar="aaa"}`) - }) - t.Run("drop_if_contains-non-existing-source", func(t *testing.T) { - f(` + + // drop_if_contains-non-existing-source + f(` - action: drop_if_contains target_label: foo source_labels: [bar] `, `{foo="aaa"}`, true, `{}`) - }) 
- t.Run("drop_if_contains-matching-source-target", func(t *testing.T) { - f(` + + // drop_if_contains-matching-source-target + f(` - action: drop_if_contains target_label: foo source_labels: [bar] `, `{bar="aaa",foo="aaa"}`, true, `{}`) - }) - t.Run("drop_if_contains-matching-sources-target", func(t *testing.T) { - f(` + + // drop_if_contains-matching-sources-target + f(` - action: drop_if_contains target_label: foo source_labels: [bar, baz] `, `{bar="aaa",foo="aaa",baz="aaa"}`, true, `{}`) - }) - t.Run("drop_if_contains-mismatching-source-target", func(t *testing.T) { - f(` + + // drop_if_contains-mismatching-source-target + f(` - action: drop_if_contains target_label: foo source_labels: [bar] `, `{bar="aaa",foo="bbb"}`, true, `{bar="aaa",foo="bbb"}`) - }) - t.Run("drop_if_contains-mismatching-sources-target", func(t *testing.T) { - f(` + + // drop_if_contains-mismatching-sources-target + f(` - action: drop_if_contains target_label: foo source_labels: [bar, baz] `, `{bar="aaa",foo="aaa",baz="bbb"}`, true, `{bar="aaa",baz="bbb",foo="aaa"}`) - }) - t.Run("keep_if_equal-miss", func(t *testing.T) { - f(` + + // keep_if_equal-miss + f(` - action: keep_if_equal source_labels: ["foo", "bar"] `, `{}`, true, `{}`) - f(` + f(` - action: keep_if_equal source_labels: ["xxx", "bar"] `, `{xxx="yyy"}`, true, `{}`) - }) - t.Run("keep_if_equal-hit", func(t *testing.T) { - f(` + + // keep_if_equal-hit + f(` - action: keep_if_equal source_labels: ["xxx", "bar"] `, `{xxx="yyy",bar="yyy"}`, true, `{bar="yyy",xxx="yyy"}`) - }) - t.Run("drop_if_equal-miss", func(t *testing.T) { - f(` + + // drop_if_equal-miss + f(` - action: drop_if_equal source_labels: ["foo", "bar"] `, `{}`, true, `{}`) - f(` + f(` - action: drop_if_equal source_labels: ["xxx", "bar"] `, `{xxx="yyy"}`, true, `{xxx="yyy"}`) - }) - t.Run("drop_if_equal-hit", func(t *testing.T) { - f(` + + // drop_if_equal-hit + f(` - action: drop_if_equal source_labels: [xxx, bar] `, `{xxx="yyy",bar="yyy"}`, true, `{}`) - }) - t.Run("keepequal-hit", func(t *testing.T) { - f(` + + // keepequal-hit + f(` - action: keepequal source_labels: [foo] target_label: bar `, `{foo="a",bar="a"}`, true, `{bar="a",foo="a"}`) - }) - t.Run("keepequal-miss", func(t *testing.T) { - f(` + + // keepequal-miss + f(` - action: keepequal source_labels: [foo] target_label: bar `, `{foo="a",bar="x"}`, true, `{}`) - }) - t.Run("dropequal-hit", func(t *testing.T) { - f(` + + // dropequal-hit + f(` - action: dropequal source_labels: [foo] target_label: bar `, `{foo="a",bar="a"}`, true, `{}`) - }) - t.Run("dropequal-miss", func(t *testing.T) { - f(` + + // dropequal-miss + f(` - action: dropequal source_labels: [foo] target_label: bar `, `{foo="a",bar="x"}`, true, `{bar="x",foo="a"}`) - }) - t.Run("keep-miss", func(t *testing.T) { - f(` + + // keep-miss + f(` - action: keep source_labels: [foo] regex: ".+" `, `{}`, true, `{}`) - f(` + f(` - action: keep source_labels: [foo] regex: ".+" `, `{xxx="yyy"}`, true, `{}`) - }) - t.Run("keep-if-miss", func(t *testing.T) { - f(` + + // keep-if-miss + f(` - action: keep if: '{foo="bar"}' `, `{foo="yyy"}`, false, `{}`) - }) - t.Run("keep-if-hit", func(t *testing.T) { - f(` + + // keep-if-hit + f(` - action: keep if: ['foobar', '{foo="yyy"}', '{a="b"}'] `, `{foo="yyy"}`, false, `{foo="yyy"}`) - }) - t.Run("keep-hit", func(t *testing.T) { - f(` + + // keep-hit + f(` - action: keep source_labels: [foo] regex: "yyy" `, `{foo="yyy"}`, false, `{foo="yyy"}`) - }) - t.Run("keep-hit-regexp", func(t *testing.T) { - f(` + + // keep-hit-regexp + f(` - action: 
keep source_labels: ["foo"] regex: ".+" `, `{foo="yyy"}`, false, `{foo="yyy"}`) - }) - t.Run("keep_metrics-miss", func(t *testing.T) { - f(` + + // keep_metrics-miss + f(` - action: keep_metrics regex: - foo - bar `, `xxx`, true, `{}`) - }) - t.Run("keep_metrics-if-miss", func(t *testing.T) { - f(` + + // keep_metrics-if-miss + f(` - action: keep_metrics if: 'bar' `, `foo`, true, `{}`) - }) - t.Run("keep_metrics-if-hit", func(t *testing.T) { - f(` + + // keep_metrics-if-hit + f(` - action: keep_metrics if: 'foo' `, `foo`, true, `foo`) - }) - t.Run("keep_metrics-hit", func(t *testing.T) { - f(` + + // keep_metrics-hit + f(` - action: keep_metrics regex: - foo - bar `, `foo`, true, `foo`) - }) - t.Run("drop-miss", func(t *testing.T) { - f(` + + // drop-miss + f(` - action: drop source_labels: [foo] regex: ".+" `, `{}`, false, `{}`) - f(` + f(` - action: drop source_labels: [foo] regex: ".+" `, `{xxx="yyy"}`, true, `{xxx="yyy"}`) - }) - t.Run("drop-if-miss", func(t *testing.T) { - f(` + + // drop-if-miss + f(` - action: drop if: '{foo="bar"}' `, `{foo="yyy"}`, true, `{foo="yyy"}`) - }) - t.Run("drop-if-hit", func(t *testing.T) { - f(` + + // drop-if-hit + f(` - action: drop if: '{foo="yyy"}' `, `{foo="yyy"}`, true, `{}`) - }) - t.Run("drop-hit", func(t *testing.T) { - f(` + + // drop-hit + f(` - action: drop source_labels: [foo] regex: yyy `, `{foo="yyy"}`, true, `{}`) - }) - t.Run("drop-hit-regexp", func(t *testing.T) { - f(` + + // drop-hit-regexp + f(` - action: drop source_labels: [foo] regex: ".+" `, `{foo="yyy"}`, true, `{}`) - }) - t.Run("drop_metrics-miss", func(t *testing.T) { - f(` + + // drop_metrics-miss + f(` - action: drop_metrics regex: - foo - bar `, `xxx`, true, `xxx`) - }) - t.Run("drop_metrics-if-miss", func(t *testing.T) { - f(` + + // drop_metrics-if-miss + f(` - action: drop_metrics if: bar `, `foo`, true, `foo`) - }) - t.Run("drop_metrics-if-hit", func(t *testing.T) { - f(` + + // drop_metrics-if-hit + f(` - action: drop_metrics if: foo `, `foo`, true, `{}`) - }) - t.Run("drop_metrics-hit", func(t *testing.T) { - f(` + + // drop_metrics-hit + f(` - action: drop_metrics regex: - foo - bar `, `foo`, true, `{}`) - }) - t.Run("hashmod-miss", func(t *testing.T) { - f(` + + // hashmod-miss + f(` - action: hashmod source_labels: [foo] target_label: aaa modulus: 123 `, `{xxx="yyy"}`, false, `{aaa="81",xxx="yyy"}`) - }) - t.Run("hashmod-if-miss", func(t *testing.T) { - f(` + + // hashmod-if-miss + f(` - action: hashmod if: '{foo="bar"}' source_labels: [foo] target_label: aaa modulus: 123 `, `{foo="yyy"}`, true, `{foo="yyy"}`) - }) - t.Run("hashmod-if-hit", func(t *testing.T) { - f(` + + // hashmod-if-hit + f(` - action: hashmod if: '{foo="yyy"}' source_labels: [foo] target_label: aaa modulus: 123 `, `{foo="yyy"}`, true, `{aaa="73",foo="yyy"}`) - }) - t.Run("hashmod-hit", func(t *testing.T) { - f(` + + // hashmod-hit + f(` - action: hashmod source_labels: [foo] target_label: aaa modulus: 123 `, `{foo="yyy"}`, true, `{aaa="73",foo="yyy"}`) - }) - t.Run("labelmap-copy-label-if-miss", func(t *testing.T) { - f(` + + // labelmap-copy-label-if-miss + f(` - action: labelmap if: '{foo="yyy",foobar="aab"}' regex: "foo" replacement: "bar" `, `{foo="yyy",foobar="aaa"}`, true, `{foo="yyy",foobar="aaa"}`) - }) - t.Run("labelmap-copy-label-if-hit", func(t *testing.T) { - f(` + + // labelmap-copy-label-if-hit + f(` - action: labelmap if: '{foo="yyy",foobar="aaa"}' regex: "foo" replacement: "bar" `, `{foo="yyy",foobar="aaa"}`, true, `{bar="yyy",foo="yyy",foobar="aaa"}`) - }) - 
t.Run("labelmap-copy-label", func(t *testing.T) { - f(` + + // labelmap-copy-label + f(` - action: labelmap regex: "foo" replacement: "bar" `, `{foo="yyy",foobar="aaa"}`, true, `{bar="yyy",foo="yyy",foobar="aaa"}`) - }) - t.Run("labelmap-remove-prefix-dot-star", func(t *testing.T) { - f(` + + // labelmap-remove-prefix-dot-star + f(` - action: labelmap regex: "foo(.*)" `, `{xoo="yyy",foobar="aaa"}`, true, `{bar="aaa",foobar="aaa",xoo="yyy"}`) - }) - t.Run("labelmap-remove-prefix-dot-plus", func(t *testing.T) { - f(` + + // labelmap-remove-prefix-dot-plus + f(` - action: labelmap regex: "foo(.+)" `, `{foo="yyy",foobar="aaa"}`, true, `{bar="aaa",foo="yyy",foobar="aaa"}`) - }) - t.Run("labelmap-regex", func(t *testing.T) { - f(` + + // labelmap-regex + f(` - action: labelmap regex: "foo(.+)" replacement: "$1-x" `, `{foo="yyy",foobar="aaa"}`, true, `{bar-x="aaa",foo="yyy",foobar="aaa"}`) - }) - t.Run("labelmap_all-if-miss", func(t *testing.T) { - f(` + + // labelmap_all-if-miss + f(` - action: labelmap_all if: foobar regex: "\\." replacement: "-" `, `{foo.bar.baz="yyy",foobar="aaa"}`, true, `{foo.bar.baz="yyy",foobar="aaa"}`) - }) - t.Run("labelmap_all-if-hit", func(t *testing.T) { - f(` + + // labelmap_all-if-hit + f(` - action: labelmap_all if: '{foo.bar.baz="yyy"}' regex: "\\." replacement: "-" `, `{foo.bar.baz="yyy",foobar="aaa"}`, true, `{foo-bar-baz="yyy",foobar="aaa"}`) - }) - t.Run("labelmap_all", func(t *testing.T) { - f(` + + // labelmap_all + f(` - action: labelmap_all regex: "\\." replacement: "-" `, `{foo.bar.baz="yyy",foobar="aaa"}`, true, `{foo-bar-baz="yyy",foobar="aaa"}`) - }) - t.Run("labelmap_all-regexp", func(t *testing.T) { - f(` + + // labelmap_all-regexp + f(` - action: labelmap_all regex: "ba(.)" replacement: "${1}ss" `, `{foo.bar.baz="yyy",foozar="aaa"}`, true, `{foo.rss.zss="yyy",foozar="aaa"}`) - }) - t.Run("labeldrop", func(t *testing.T) { - f(` + + // labeldrop + f(` - action: labeldrop regex: dropme `, `{aaa="bbb"}`, true, `{aaa="bbb"}`) - // if-miss - f(` + + // if-miss + f(` - action: labeldrop if: foo regex: dropme `, `{xxx="yyy",dropme="aaa",foo="bar"}`, false, `{dropme="aaa",foo="bar",xxx="yyy"}`) - // if-hit - f(` + + // if-hit + f(` - action: labeldrop if: '{xxx="yyy"}' regex: dropme `, `{xxx="yyy",dropme="aaa",foo="bar"}`, false, `{foo="bar",xxx="yyy"}`) - f(` + f(` - action: labeldrop regex: dropme `, `{xxx="yyy",dropme="aaa",foo="bar"}`, false, `{foo="bar",xxx="yyy"}`) - // regex in single quotes - f(` + + // regex in single quotes + f(` - action: labeldrop regex: 'dropme' `, `{xxx="yyy",dropme="aaa"}`, false, `{xxx="yyy"}`) - // regex in double quotes - f(` + + // regex in double quotes + f(` - action: labeldrop regex: "dropme" `, `{xxx="yyy",dropme="aaa"}`, false, `{xxx="yyy"}`) - }) - t.Run("labeldrop-prefix", func(t *testing.T) { - f(` + + // labeldrop-prefix + f(` - action: labeldrop regex: "dropme.*" `, `{aaa="bbb"}`, true, `{aaa="bbb"}`) - f(` + f(` - action: labeldrop regex: "dropme(.+)" `, `{xxx="yyy",dropme-please="aaa",foo="bar"}`, false, `{foo="bar",xxx="yyy"}`) - }) - t.Run("labeldrop-regexp", func(t *testing.T) { - f(` + + // labeldrop-regexp + f(` - action: labeldrop regex: ".*dropme.*" `, `{aaa="bbb"}`, true, `{aaa="bbb"}`) - f(` + f(` - action: labeldrop regex: ".*dropme.*" `, `{xxx="yyy",dropme-please="aaa",foo="bar"}`, false, `{foo="bar",xxx="yyy"}`) - }) - t.Run("labelkeep", func(t *testing.T) { - f(` + + // labelkeep + f(` - action: labelkeep regex: "keepme" `, `{keepme="aaa"}`, true, `{keepme="aaa"}`) - // if-miss - f(` + + // 
if-miss + f(` - action: labelkeep if: '{aaaa="awefx"}' regex: keepme `, `{keepme="aaa",aaaa="awef",keepme-aaa="234"}`, false, `{aaaa="awef",keepme="aaa",keepme-aaa="234"}`) - // if-hit - f(` + + // if-hit + f(` - action: labelkeep if: '{aaaa="awef"}' regex: keepme `, `{keepme="aaa",aaaa="awef",keepme-aaa="234"}`, false, `{keepme="aaa"}`) - f(` + f(` - action: labelkeep regex: keepme `, `{keepme="aaa",aaaa="awef",keepme-aaa="234"}`, false, `{keepme="aaa"}`) - }) - t.Run("labelkeep-regexp", func(t *testing.T) { - f(` + + // labelkeep-regexp + f(` - action: labelkeep regex: "keepme.*" `, `{keepme="aaa"}`, true, `{keepme="aaa"}`) - f(` + f(` - action: labelkeep regex: "keepme.*" `, `{keepme="aaa",aaaa="awef",keepme-aaa="234"}`, false, `{keepme="aaa",keepme-aaa="234"}`) - }) - t.Run("upper-lower-case", func(t *testing.T) { - f(` + + // upper-lower-case + f(` - action: uppercase source_labels: ["foo"] target_label: foo `, `{foo="bar"}`, true, `{foo="BAR"}`) - f(` + f(` - action: lowercase source_labels: ["foo", "bar"] target_label: baz - action: labeldrop regex: foo|bar `, `{foo="BaR",bar="fOO"}`, true, `{baz="bar;foo"}`) - f(` + f(` - action: lowercase source_labels: ["foo"] target_label: baz @@ -885,50 +892,50 @@ func TestParsedRelabelConfigsApply(t *testing.T) { source_labels: ["bar"] target_label: baz `, `{qux="quux"}`, true, `{qux="quux"}`) - }) - t.Run("graphite-match", func(t *testing.T) { - f(` + + // graphite-match + f(` - action: graphite match: foo.*.baz labels: __name__: aaa job: ${1}-zz `, `foo.bar.baz`, true, `aaa{job="bar-zz"}`) - }) - t.Run("graphite-mismatch", func(t *testing.T) { - f(` + + // graphite-mismatch + f(` - action: graphite match: foo.*.baz labels: __name__: aaa job: ${1}-zz `, `foo.bar.bazz`, true, `foo.bar.bazz`) - }) - t.Run("replacement-with-label-refs", func(t *testing.T) { - // no regex - f(` + + // replacement-with-label-refs + // no regex + f(` - target_label: abc replacement: "{{__name__}}.{{foo}}" `, `qwe{foo="bar",baz="aaa"}`, true, `qwe{abc="qwe.bar",baz="aaa",foo="bar"}`) - // with regex - f(` + + // with regex + f(` - target_label: abc replacement: "{{__name__}}.{{foo}}.$1" source_labels: [baz] regex: "a(.+)" `, `qwe{foo="bar",baz="aaa"}`, true, `qwe{abc="qwe.bar.aa",baz="aaa",foo="bar"}`) - }) + // Check $ at the end of regex - see https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3131 - t.Run("replacement-with-$-at-the-end-of-regex", func(t *testing.T) { - f(` + f(` - target_label: xyz regex: "foo\\$$" replacement: bar source_labels: [xyz] `, `metric{xyz="foo$",a="b"}`, true, `metric{a="b",xyz="bar"}`) - }) - t.Run("issue-3251", func(t *testing.T) { - f(` + + // issue-3251 + f(` - source_labels: [instance, container_label_com_docker_swarm_task_name] separator: ';' # regex: '(.*?)\..*;(.*?)\..*' @@ -937,8 +944,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) { target_label: container_label_com_docker_swarm_task_name action: replace `, `{instance="subdomain.domain.com",container_label_com_docker_swarm_task_name="myservice.h408nlaxmv8oqkn1pjjtd71to.nv987lz99rb27lkjjnfiay0g4"}`, true, - `{container_label_com_docker_swarm_task_name="myservice:subdomain",instance="subdomain.domain.com"}`) - }) + `{container_label_com_docker_swarm_task_name="myservice:subdomain",instance="subdomain.domain.com"}`) } func TestFinalizeLabels(t *testing.T) { @@ -1064,14 +1070,13 @@ func TestParsedRelabelConfigsApplyForMultipleSeries(t *testing.T) { } } - t.Run("drops one of series", func(t *testing.T) { - f(` + // drops one of series + f(` - action: drop if: 
'{__name__!~"smth"}' `, []string{`smth`, `notthis`}, []string{`smth`}) - f(` + f(` - action: drop if: '{__name__!~"smth"}' `, []string{`notthis`, `smth`}, []string{`smth`}) - }) } diff --git a/lib/promscrape/client.go b/lib/promscrape/client.go index 48a23dc05..9130b4613 100644 --- a/lib/promscrape/client.go +++ b/lib/promscrape/client.go @@ -54,7 +54,7 @@ func newClient(ctx context.Context, sw *ScrapeWork) (*client, error) { setHeaders := func(req *http.Request) error { return sw.AuthConfig.SetHeaders(req, true) } - setProxyHeaders := func(req *http.Request) error { + setProxyHeaders := func(_ *http.Request) error { return nil } proxyURL := sw.ProxyURL @@ -90,7 +90,7 @@ func newClient(ctx context.Context, sw *ScrapeWork) (*client, error) { Timeout: sw.ScrapeTimeout, } if sw.DenyRedirects { - hc.CheckRedirect = func(req *http.Request, via []*http.Request) error { + hc.CheckRedirect = func(_ *http.Request, _ []*http.Request) error { return http.ErrUseLastResponse } } diff --git a/lib/promscrape/discoveryutils/client.go b/lib/promscrape/discoveryutils/client.go index a97b05be7..52736f22a 100644 --- a/lib/promscrape/discoveryutils/client.go +++ b/lib/promscrape/discoveryutils/client.go @@ -144,20 +144,20 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy }, } - setHTTPHeaders := func(req *http.Request) error { return nil } + setHTTPHeaders := func(_ *http.Request) error { return nil } if ac != nil { setHTTPHeaders = func(req *http.Request) error { return ac.SetHeaders(req, true) } } if httpCfg.FollowRedirects != nil && !*httpCfg.FollowRedirects { - checkRedirect := func(req *http.Request, via []*http.Request) error { + checkRedirect := func(_ *http.Request, _ []*http.Request) error { return http.ErrUseLastResponse } client.CheckRedirect = checkRedirect blockingClient.CheckRedirect = checkRedirect } - setHTTPProxyHeaders := func(req *http.Request) error { return nil } + setHTTPProxyHeaders := func(_ *http.Request) error { return nil } if proxyAC != nil { setHTTPProxyHeaders = func(req *http.Request) error { return proxyURL.SetHeaders(proxyAC, req) diff --git a/lib/promscrape/scraper.go b/lib/promscrape/scraper.go index 87b52a5b3..eb9f224c9 100644 --- a/lib/promscrape/scraper.go +++ b/lib/promscrape/scraper.go @@ -141,7 +141,7 @@ func runScraper(configFile string, pushData func(at *auth.Token, wr *prompbmarsh scs.add("nomad_sd_configs", *nomad.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getNomadSDScrapeWork(swsPrev) }) scs.add("openstack_sd_configs", *openstack.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getOpenStackSDScrapeWork(swsPrev) }) scs.add("yandexcloud_sd_configs", *yandexcloud.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getYandexCloudSDScrapeWork(swsPrev) }) - scs.add("static_configs", 0, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getStaticScrapeWork() }) + scs.add("static_configs", 0, func(cfg *Config, _ []*ScrapeWork) []*ScrapeWork { return cfg.getStaticScrapeWork() }) var tickerCh <-chan time.Time if *configCheckInterval > 0 { diff --git a/lib/promscrape/scrapework_test.go b/lib/promscrape/scrapework_test.go index af874380e..787203d29 100644 --- a/lib/promscrape/scrapework_test.go +++ b/lib/promscrape/scrapework_test.go @@ -90,14 +90,14 @@ func TestScrapeWorkScrapeInternalFailure(t *testing.T) { } readDataCalls := 0 - sw.ReadData = func(dst *bytesutil.ByteBuffer) error { + sw.ReadData = func(_ 
*bytesutil.ByteBuffer) error { readDataCalls++ return fmt.Errorf("error when reading data") } pushDataCalls := 0 var pushDataErr error - sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) { + sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) { if err := expectEqualTimeseries(wr.Timeseries, timeseriesExpected); err != nil { pushDataErr = fmt.Errorf("unexpected data pushed: %w\ngot\n%#v\nwant\n%#v", err, wr.Timeseries, timeseriesExpected) } @@ -139,7 +139,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) { pushDataCalls := 0 var pushDataErr error - sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) { + sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) { pushDataCalls++ if len(wr.Timeseries) > len(timeseriesExpected) { pushDataErr = fmt.Errorf("too many time series obtained; got %d; want %d\ngot\n%+v\nwant\n%+v", @@ -721,7 +721,7 @@ func TestSendStaleSeries(t *testing.T) { defer common.StopUnmarshalWorkers() var staleMarks int - sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) { + sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) { staleMarks += len(wr.Timeseries) } sw.sendStaleSeries(lastScrape, currScrape, 0, false) diff --git a/lib/promscrape/scrapework_timing_test.go b/lib/promscrape/scrapework_timing_test.go index 2902b3e36..71d862645 100644 --- a/lib/promscrape/scrapework_timing_test.go +++ b/lib/promscrape/scrapework_timing_test.go @@ -84,7 +84,7 @@ vm_tcplistener_write_calls_total{name="https", addr=":443"} 132356 var sw scrapeWork sw.Config = &ScrapeWork{} sw.ReadData = readDataFunc - sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) {} + sw.PushData = func(_ *auth.Token, _ *prompbmarshal.WriteRequest) {} tsmGlobal.Register(&sw) timestamp := int64(0) for pb.Next() { diff --git a/lib/protoparser/newrelic/stream/streamparser_test.go b/lib/protoparser/newrelic/stream/streamparser_test.go index 4f9b3ab97..54fb442a3 100644 --- a/lib/protoparser/newrelic/stream/streamparser_test.go +++ b/lib/protoparser/newrelic/stream/streamparser_test.go @@ -14,7 +14,7 @@ func TestParseFailure(t *testing.T) { f := func(req string) { t.Helper() - callback := func(rows []newrelic.Row) error { + callback := func(_ []newrelic.Row) error { panic(fmt.Errorf("unexpected call into callback")) } r := bytes.NewReader([]byte(req)) diff --git a/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go b/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go index 2915786cd..b0df4135f 100644 --- a/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go +++ b/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go @@ -24,7 +24,7 @@ func BenchmarkParseStream(b *testing.B) { data := pbRequest.MarshalProtobuf(nil) for p.Next() { - err := ParseStream(bytes.NewBuffer(data), false, nil, func(tss []prompbmarshal.TimeSeries) error { + err := ParseStream(bytes.NewBuffer(data), false, nil, func(_ []prompbmarshal.TimeSeries) error { return nil }) if err != nil { diff --git a/lib/storage/tag_filters.go b/lib/storage/tag_filters.go index 9214651c6..a860b4dea 100644 --- a/lib/storage/tag_filters.go +++ b/lib/storage/tag_filters.go @@ -644,7 +644,7 @@ const ( func getOptimizedReMatchFuncExt(reMatch func(b []byte) bool, sre *syntax.Regexp) (func(b []byte) bool, string, uint64) { if isDotStar(sre) { // '.*' - return func(b []byte) bool { + return func(_ []byte) bool { return true }, "", fullMatchCost } diff --git a/lib/storage/tag_filters_test.go 
b/lib/storage/tag_filters_test.go index 730b02f41..1eedeb454 100644 --- a/lib/storage/tag_filters_test.go +++ b/lib/storage/tag_filters_test.go @@ -784,7 +784,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { } } - t.Run("plain-value", func(t *testing.T) { + t.Run("plain-value", func(_ *testing.T) { value := "xx" isNegative := false isRegexp := false @@ -796,7 +796,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("foo") mismatch("xx") }) - t.Run("negative-plain-value", func(t *testing.T) { + t.Run("negative-plain-value", func(_ *testing.T) { value := "xx" isNegative := true isRegexp := false @@ -811,7 +811,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { match("xxx") match("xxfoo") }) - t.Run("regexp-convert-to-plain-value", func(t *testing.T) { + t.Run("regexp-convert-to-plain-value", func(_ *testing.T) { value := "http" isNegative := false isRegexp := true @@ -824,7 +824,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("http") mismatch("foobar") }) - t.Run("negative-regexp-convert-to-plain-value", func(t *testing.T) { + t.Run("negative-regexp-convert-to-plain-value", func(_ *testing.T) { value := "http" isNegative := true isRegexp := true @@ -839,7 +839,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { match("httpx") match("foobar") }) - t.Run("regexp-prefix-any-suffix", func(t *testing.T) { + t.Run("regexp-prefix-any-suffix", func(_ *testing.T) { value := "http.*" isNegative := false isRegexp := true @@ -852,7 +852,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { match("http") match("foobar") }) - t.Run("negative-regexp-prefix-any-suffix", func(t *testing.T) { + t.Run("negative-regexp-prefix-any-suffix", func(_ *testing.T) { value := "http.*" isNegative := true isRegexp := true @@ -867,7 +867,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("httpsdf") mismatch("foobar") }) - t.Run("regexp-prefix-contains-suffix", func(t *testing.T) { + t.Run("regexp-prefix-contains-suffix", func(_ *testing.T) { value := "http.*foo.*" isNegative := false isRegexp := true @@ -883,7 +883,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { match("xfoobar") match("xfoo") }) - t.Run("negative-regexp-prefix-contains-suffix", func(t *testing.T) { + t.Run("negative-regexp-prefix-contains-suffix", func(_ *testing.T) { value := "http.*foo.*" isNegative := true isRegexp := true @@ -903,7 +903,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("httpxfoobar") mismatch("httpxfoo") }) - t.Run("negative-regexp-noprefix-contains-suffix", func(t *testing.T) { + t.Run("negative-regexp-noprefix-contains-suffix", func(_ *testing.T) { value := ".*foo.*" isNegative := true isRegexp := true @@ -919,7 +919,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("xfoobar") mismatch("xfoo") }) - t.Run("regexp-prefix-special-suffix", func(t *testing.T) { + t.Run("regexp-prefix-special-suffix", func(_ *testing.T) { value := "http.*bar" isNegative := false isRegexp := true @@ -934,7 +934,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { match("foobar") mismatch("foobarx") }) - t.Run("negative-regexp-prefix-special-suffix", func(t *testing.T) { + t.Run("negative-regexp-prefix-special-suffix", func(_ *testing.T) { value := "http.*bar" isNegative := true isRegexp := true @@ -951,7 +951,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { match("httpxybarx") mismatch("ahttpxybar") }) - t.Run("negative-regexp-noprefix-special-suffix", func(t *testing.T) { + t.Run("negative-regexp-noprefix-special-suffix", func(_ *testing.T) { value := ".*bar" isNegative := true isRegexp := true @@ -1002,7 
+1002,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("bar") match("xhttpbar") }) - t.Run("regexp-iflag-no-suffix", func(t *testing.T) { + t.Run("regexp-iflag-no-suffix", func(_ *testing.T) { value := "(?i)http" isNegative := false isRegexp := true @@ -1020,7 +1020,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("xhttp://") mismatch("hTTp://foobar.com") }) - t.Run("negative-regexp-iflag-no-suffix", func(t *testing.T) { + t.Run("negative-regexp-iflag-no-suffix", func(_ *testing.T) { value := "(?i)http" isNegative := true isRegexp := true @@ -1038,7 +1038,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { match("xhttp://") match("hTTp://foobar.com") }) - t.Run("regexp-iflag-any-suffix", func(t *testing.T) { + t.Run("regexp-iflag-any-suffix", func(_ *testing.T) { value := "(?i)http.*" isNegative := false isRegexp := true @@ -1055,7 +1055,7 @@ func TestTagFilterMatchSuffix(t *testing.T) { mismatch("xhttp") mismatch("xhttp://") }) - t.Run("negative-regexp-iflag-any-suffix", func(t *testing.T) { + t.Run("negative-regexp-iflag-any-suffix", func(_ *testing.T) { value := "(?i)http.*" isNegative := true isRegexp := true diff --git a/lib/storage/tag_filters_timing_test.go b/lib/storage/tag_filters_timing_test.go index 3789560fe..f9db7facc 100644 --- a/lib/storage/tag_filters_timing_test.go +++ b/lib/storage/tag_filters_timing_test.go @@ -411,7 +411,7 @@ func BenchmarkOptimizedReMatchCost(b *testing.B) { }) }) b.Run(".*", func(b *testing.B) { - reMatch := func(b []byte) bool { + reMatch := func(_ []byte) bool { return true } suffix := []byte("foo1.bar.baz.sss.ddd") diff --git a/lib/streamaggr/deduplicator_timing_test.go b/lib/streamaggr/deduplicator_timing_test.go index e4e859590..e2b764041 100644 --- a/lib/streamaggr/deduplicator_timing_test.go +++ b/lib/streamaggr/deduplicator_timing_test.go @@ -8,7 +8,7 @@ import ( ) func BenchmarkDeduplicatorPush(b *testing.B) { - pushFunc := func(tss []prompbmarshal.TimeSeries) {} + pushFunc := func(_ []prompbmarshal.TimeSeries) {} d := NewDeduplicator(pushFunc, time.Hour, nil) b.ReportAllocs() diff --git a/lib/streamaggr/streamaggr_test.go b/lib/streamaggr/streamaggr_test.go index 754c06428..5a079979b 100644 --- a/lib/streamaggr/streamaggr_test.go +++ b/lib/streamaggr/streamaggr_test.go @@ -17,7 +17,7 @@ import ( func TestAggregatorsFailure(t *testing.T) { f := func(config string) { t.Helper() - pushFunc := func(tss []prompbmarshal.TimeSeries) { + pushFunc := func(_ []prompbmarshal.TimeSeries) { panic(fmt.Errorf("pushFunc shouldn't be called")) } a, err := newAggregatorsFromData([]byte(config), pushFunc, nil) @@ -157,7 +157,7 @@ func TestAggregatorsEqual(t *testing.T) { f := func(a, b string, expectedResult bool) { t.Helper() - pushFunc := func(tss []prompbmarshal.TimeSeries) {} + pushFunc := func(_ []prompbmarshal.TimeSeries) {} aa, err := newAggregatorsFromData([]byte(a), pushFunc, nil) if err != nil { t.Fatalf("cannot initialize aggregators: %s", err) diff --git a/lib/streamaggr/streamaggr_timing_test.go b/lib/streamaggr/streamaggr_timing_test.go index a0561b652..0a5dc31c4 100644 --- a/lib/streamaggr/streamaggr_timing_test.go +++ b/lib/streamaggr/streamaggr_timing_test.go @@ -43,7 +43,7 @@ func BenchmarkAggregatorsFlushSerial(b *testing.B) { "max", "avg", "increase", "count_series", "last", "stddev", "stdvar", "total_prometheus", "increase_prometheus", } - pushFunc := func(tss []prompbmarshal.TimeSeries) {} + pushFunc := func(_ []prompbmarshal.TimeSeries) {} a := newBenchAggregators(outputs, pushFunc) defer a.MustStop() _ = 
a.Push(benchSeries, nil) @@ -59,7 +59,7 @@ func BenchmarkAggregatorsFlushSerial(b *testing.B) { } func benchmarkAggregatorsPush(b *testing.B, output string) { - pushFunc := func(tss []prompbmarshal.TimeSeries) {} + pushFunc := func(_ []prompbmarshal.TimeSeries) {} a := newBenchAggregators([]string{output}, pushFunc) defer a.MustStop() diff --git a/lib/uint64set/uint64set_test.go b/lib/uint64set/uint64set_test.go index f6c7ed279..d835382b9 100644 --- a/lib/uint64set/uint64set_test.go +++ b/lib/uint64set/uint64set_test.go @@ -403,7 +403,7 @@ func testSetBasicOps(t *testing.T, itemsCount int) { // Verify fast stop calls := 0 - s.ForEach(func(part []uint64) bool { + s.ForEach(func(_ []uint64) bool { calls++ return false }) @@ -413,7 +413,7 @@ func testSetBasicOps(t *testing.T, itemsCount int) { // Verify ForEach on nil set. var s1 *Set - s1.ForEach(func(part []uint64) bool { + s1.ForEach(func(_ []uint64) bool { t.Fatalf("callback shouldn't be called on empty set") return true })
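
The hunks above apply two mechanical refactorings: callback and subtest parameters that a function never reads are renamed to the blank identifier `_`, and per-case t.Run wrappers in failure-path tests are replaced with a comment plus a direct call to the shared t.Helper-based checker. Below is a minimal, self-contained sketch of both patterns; parseInput and TestParseInputFailure are hypothetical names used only for illustration and are not part of the changeset.

package example

import (
	"fmt"
	"testing"
)

// parseInput is a stand-in for parsers such as parseJSONRequest: it invokes
// callback for every decoded row and returns an error on malformed input.
func parseInput(data []byte, callback func(timestamp int64, fields []string)) error {
	if len(data) == 0 {
		return fmt.Errorf("cannot parse empty input")
	}
	callback(0, []string{string(data)})
	return nil
}

func TestParseInputFailure(t *testing.T) {
	f := func(s string) {
		t.Helper()
		// The callback ignores its arguments, so both are declared as `_`.
		err := parseInput([]byte(s), func(_ int64, _ []string) {
			t.Fatalf("unexpected call to callback")
		})
		if err == nil {
			t.Fatalf("expecting non-nil error")
		}
	}

	// Failure cases are plain calls to f preceded by a comment rather than
	// separate t.Run subtests.

	// empty input
	f("")
}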