all: fix golangci-lint(revive) warnings after 0c0ed61ce7

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/pull/6001
Author: Aliaksandr Valialkin
Date:   2024-04-02 23:16:24 +03:00
Parent: c3a72b6cdb
Commit: 918cccaddf
47 changed files with 976 additions and 970 deletions
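
The warnings fixed here come from revive's unused-parameter check, which flags function parameters that are declared but never read. The fix applied throughout the diff below is to rename each unused parameter to the blank identifier "_", which silences the warning while preserving the function signature, so every assignment to a function-typed variable, field, or argument still type-checks. A minimal, self-contained sketch of the pattern (the names are illustrative, not from the VictoriaMetrics codebase):

	package main

	import "fmt"

	// process takes a callback whose type demands two parameters,
	// even when a particular callback needs only one of them.
	func process(items []string, cb func(index int, item string)) {
		for i, item := range items {
			cb(i, item)
		}
	}

	func main() {
		// revive would flag a named but unused index parameter here;
		// blanking it keeps the required signature and silences the warning.
		process([]string{"a", "b"}, func(_ int, item string) {
			fmt.Println(item)
		})
	}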

@@ -369,7 +369,7 @@ func readIn(readFor string, t *testing.T, insertTime time.Time) []test {
 	t.Helper()
 	s := newSuite(t)
 	var tt []test
-	s.noError(filepath.Walk(filepath.Join(testFixturesDir, readFor), func(path string, info os.FileInfo, err error) error {
+	s.noError(filepath.Walk(filepath.Join(testFixturesDir, readFor), func(path string, _ os.FileInfo, err error) error {
 		if err != nil {
 			return err
 		}

@@ -33,7 +33,7 @@ func benchmarkReadBulkRequest(b *testing.B, isGzip bool) {
 	timeField := "@timestamp"
 	msgField := "message"
-	processLogMessage := func(timestmap int64, fields []logstorage.Field) {}
+	processLogMessage := func(_ int64, _ []logstorage.Field) {}
 	b.ReportAllocs()
 	b.SetBytes(int64(len(data)))

@@ -11,7 +11,7 @@ import (
 func TestParseJSONRequestFailure(t *testing.T) {
 	f := func(s string) {
 		t.Helper()
-		n, err := parseJSONRequest([]byte(s), func(timestamp int64, fields []logstorage.Field) {
+		n, err := parseJSONRequest([]byte(s), func(_ int64, _ []logstorage.Field) {
 			t.Fatalf("unexpected call to parseJSONRequest callback!")
 		})
 		if err == nil {

@@ -27,7 +27,7 @@ func benchmarkParseJSONRequest(b *testing.B, streams, rows, labels int) {
 	b.RunParallel(func(pb *testing.PB) {
 		data := getJSONBody(streams, rows, labels)
 		for pb.Next() {
-			_, err := parseJSONRequest(data, func(timestamp int64, fields []logstorage.Field) {})
+			_, err := parseJSONRequest(data, func(_ int64, _ []logstorage.Field) {})
 			if err != nil {
 				panic(fmt.Errorf("unexpected error: %w", err))
 			}

@@ -29,7 +29,7 @@ func benchmarkParseProtobufRequest(b *testing.B, streams, rows, labels int) {
 	b.RunParallel(func(pb *testing.PB) {
 		body := getProtobufBody(streams, rows, labels)
 		for pb.Next() {
-			_, err := parseProtobufRequest(body, func(timestamp int64, fields []logstorage.Field) {})
+			_, err := parseProtobufRequest(body, func(_ int64, _ []logstorage.Field) {})
 			if err != nil {
 				panic(fmt.Errorf("unexpected error: %w", err))
 			}

@@ -1004,7 +1004,7 @@ func getRowsCount(tss []prompbmarshal.TimeSeries) int {
 // CheckStreamAggrConfigs checks configs pointed by -remoteWrite.streamAggr.config
 func CheckStreamAggrConfigs() error {
-	pushNoop := func(tss []prompbmarshal.TimeSeries) {}
+	pushNoop := func(_ []prompbmarshal.TimeSeries) {}
 	for idx, sasFile := range *streamAggrConfig {
 		if sasFile == "" {
 			continue

@@ -71,7 +71,7 @@ func TestVMInstantQuery(t *testing.T) {
 			w.Write([]byte(`{"status":"success","data":{"resultType":"scalar","result":[1583786142, "1"]},"stats":{"seriesFetched": "42"}}`))
 		}
 	})
-	mux.HandleFunc("/render", func(w http.ResponseWriter, request *http.Request) {
+	mux.HandleFunc("/render", func(w http.ResponseWriter, _ *http.Request) {
 		c++
 		switch c {
 		case 8:

@@ -304,7 +304,7 @@ func getAlertURLGenerator(externalURL *url.URL, externalAlertSource string, vali
 		"tpl": externalAlertSource,
 	}
 	return func(alert notifier.Alert) string {
-		qFn := func(query string) ([]datasource.Metric, error) {
+		qFn := func(_ string) ([]datasource.Metric, error) {
 			return nil, fmt.Errorf("`query` template isn't supported for alert source template")
 		}
 		templated, err := alert.ExecTemplate(qFn, alert.Labels, m)

@@ -178,7 +178,7 @@ func TestAlert_ExecTemplate(t *testing.T) {
 		},
 	}
-	qFn := func(q string) ([]datasource.Metric, error) {
+	qFn := func(_ string) ([]datasource.Metric, error) {
 		return []datasource.Metric{
 			{
 				Labels: []datasource.Label{

@@ -310,7 +310,7 @@ func (ar *AlertingRule) execRange(ctx context.Context, start, end time.Time) ([]
 	}
 	var result []prompbmarshal.TimeSeries
 	holdAlertState := make(map[uint64]*notifier.Alert)
-	qFn := func(query string) ([]datasource.Metric, error) {
+	qFn := func(_ string) ([]datasource.Metric, error) {
 		return nil, fmt.Errorf("`query` template isn't supported in replay mode")
 	}
 	for _, s := range res.Data {

@@ -476,7 +476,7 @@ func templateFuncs() textTpl.FuncMap {
 		// For example, {{ query "foo" | first | value }} will
 		// execute "/api/v1/query?query=foo" request and will return
 		// the first value in response.
-		"query": func(q string) ([]metric, error) {
+		"query": func(_ string) ([]metric, error) {
 			// query function supposed to be substituted at FuncsWithQuery().
 			// it is present here only for validation purposes, when there is no
 			// provided datasource.

@@ -36,7 +36,7 @@ func TestHandler(t *testing.T) {
 	}}
 	rh := &requestHandler{m: m}
-	getResp := func(url string, to interface{}, code int) {
+	getResp := func(t *testing.T, url string, to interface{}, code int) {
 		t.Helper()
 		resp, err := http.Get(url)
 		if err != nil {
@@ -60,43 +60,43 @@ func TestHandler(t *testing.T) {
 	defer ts.Close()
 	t.Run("/", func(t *testing.T) {
-		getResp(ts.URL, nil, 200)
-		getResp(ts.URL+"/vmalert", nil, 200)
-		getResp(ts.URL+"/vmalert/alerts", nil, 200)
-		getResp(ts.URL+"/vmalert/groups", nil, 200)
-		getResp(ts.URL+"/vmalert/notifiers", nil, 200)
-		getResp(ts.URL+"/rules", nil, 200)
+		getResp(t, ts.URL, nil, 200)
+		getResp(t, ts.URL+"/vmalert", nil, 200)
+		getResp(t, ts.URL+"/vmalert/alerts", nil, 200)
+		getResp(t, ts.URL+"/vmalert/groups", nil, 200)
+		getResp(t, ts.URL+"/vmalert/notifiers", nil, 200)
+		getResp(t, ts.URL+"/rules", nil, 200)
 	})
 	t.Run("/vmalert/rule", func(t *testing.T) {
 		a := ruleToAPI(ar)
-		getResp(ts.URL+"/vmalert/"+a.WebLink(), nil, 200)
+		getResp(t, ts.URL+"/vmalert/"+a.WebLink(), nil, 200)
 		r := ruleToAPI(rr)
-		getResp(ts.URL+"/vmalert/"+r.WebLink(), nil, 200)
+		getResp(t, ts.URL+"/vmalert/"+r.WebLink(), nil, 200)
 	})
 	t.Run("/vmalert/alert", func(t *testing.T) {
 		alerts := ruleToAPIAlert(ar)
 		for _, a := range alerts {
-			getResp(ts.URL+"/vmalert/"+a.WebLink(), nil, 200)
+			getResp(t, ts.URL+"/vmalert/"+a.WebLink(), nil, 200)
 		}
 	})
 	t.Run("/vmalert/rule?badParam", func(t *testing.T) {
 		params := fmt.Sprintf("?%s=0&%s=1", paramGroupID, paramRuleID)
-		getResp(ts.URL+"/vmalert/rule"+params, nil, 404)
+		getResp(t, ts.URL+"/vmalert/rule"+params, nil, 404)
 		params = fmt.Sprintf("?%s=1&%s=0", paramGroupID, paramRuleID)
-		getResp(ts.URL+"/vmalert/rule"+params, nil, 404)
+		getResp(t, ts.URL+"/vmalert/rule"+params, nil, 404)
 	})
 	t.Run("/api/v1/alerts", func(t *testing.T) {
 		lr := listAlertsResponse{}
-		getResp(ts.URL+"/api/v1/alerts", &lr, 200)
+		getResp(t, ts.URL+"/api/v1/alerts", &lr, 200)
 		if length := len(lr.Data.Alerts); length != 1 {
 			t.Errorf("expected 1 alert got %d", length)
 		}
 		lr = listAlertsResponse{}
-		getResp(ts.URL+"/vmalert/api/v1/alerts", &lr, 200)
+		getResp(t, ts.URL+"/vmalert/api/v1/alerts", &lr, 200)
 		if length := len(lr.Data.Alerts); length != 1 {
 			t.Errorf("expected 1 alert got %d", length)
 		}
@@ -104,13 +104,13 @@ func TestHandler(t *testing.T) {
 	t.Run("/api/v1/alert?alertID&groupID", func(t *testing.T) {
 		expAlert := newAlertAPI(ar, ar.GetAlerts()[0])
 		alert := &apiAlert{}
-		getResp(ts.URL+"/"+expAlert.APILink(), alert, 200)
+		getResp(t, ts.URL+"/"+expAlert.APILink(), alert, 200)
 		if !reflect.DeepEqual(alert, expAlert) {
 			t.Errorf("expected %v is equal to %v", alert, expAlert)
 		}
 		alert = &apiAlert{}
-		getResp(ts.URL+"/vmalert/"+expAlert.APILink(), alert, 200)
+		getResp(t, ts.URL+"/vmalert/"+expAlert.APILink(), alert, 200)
 		if !reflect.DeepEqual(alert, expAlert) {
 			t.Errorf("expected %v is equal to %v", alert, expAlert)
 		}
@@ -118,28 +118,28 @@ func TestHandler(t *testing.T) {
 	t.Run("/api/v1/alert?badParams", func(t *testing.T) {
 		params := fmt.Sprintf("?%s=0&%s=1", paramGroupID, paramAlertID)
-		getResp(ts.URL+"/api/v1/alert"+params, nil, 404)
-		getResp(ts.URL+"/vmalert/api/v1/alert"+params, nil, 404)
+		getResp(t, ts.URL+"/api/v1/alert"+params, nil, 404)
+		getResp(t, ts.URL+"/vmalert/api/v1/alert"+params, nil, 404)
 		params = fmt.Sprintf("?%s=1&%s=0", paramGroupID, paramAlertID)
-		getResp(ts.URL+"/api/v1/alert"+params, nil, 404)
-		getResp(ts.URL+"/vmalert/api/v1/alert"+params, nil, 404)
+		getResp(t, ts.URL+"/api/v1/alert"+params, nil, 404)
+		getResp(t, ts.URL+"/vmalert/api/v1/alert"+params, nil, 404)
 		// bad request, alertID is missing
 		params = fmt.Sprintf("?%s=1", paramGroupID)
-		getResp(ts.URL+"/api/v1/alert"+params, nil, 400)
-		getResp(ts.URL+"/vmalert/api/v1/alert"+params, nil, 400)
+		getResp(t, ts.URL+"/api/v1/alert"+params, nil, 400)
+		getResp(t, ts.URL+"/vmalert/api/v1/alert"+params, nil, 400)
 	})
 	t.Run("/api/v1/rules", func(t *testing.T) {
 		lr := listGroupsResponse{}
-		getResp(ts.URL+"/api/v1/rules", &lr, 200)
+		getResp(t, ts.URL+"/api/v1/rules", &lr, 200)
 		if length := len(lr.Data.Groups); length != 1 {
 			t.Errorf("expected 1 group got %d", length)
 		}
 		lr = listGroupsResponse{}
-		getResp(ts.URL+"/vmalert/api/v1/rules", &lr, 200)
+		getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200)
 		if length := len(lr.Data.Groups); length != 1 {
 			t.Errorf("expected 1 group got %d", length)
 		}
@@ -147,21 +147,21 @@ func TestHandler(t *testing.T) {
 	t.Run("/api/v1/rule?ruleID&groupID", func(t *testing.T) {
 		expRule := ruleToAPI(ar)
 		gotRule := apiRule{}
-		getResp(ts.URL+"/"+expRule.APILink(), &gotRule, 200)
+		getResp(t, ts.URL+"/"+expRule.APILink(), &gotRule, 200)
 		if expRule.ID != gotRule.ID {
 			t.Errorf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID)
 		}
 		gotRule = apiRule{}
-		getResp(ts.URL+"/vmalert/"+expRule.APILink(), &gotRule, 200)
+		getResp(t, ts.URL+"/vmalert/"+expRule.APILink(), &gotRule, 200)
 		if expRule.ID != gotRule.ID {
 			t.Errorf("expected to get Rule %q; got %q instead", expRule.ID, gotRule.ID)
 		}
 		gotRuleWithUpdates := apiRuleWithUpdates{}
-		getResp(ts.URL+"/"+expRule.APILink(), &gotRuleWithUpdates, 200)
+		getResp(t, ts.URL+"/"+expRule.APILink(), &gotRuleWithUpdates, 200)
 		if gotRuleWithUpdates.StateUpdates == nil || len(gotRuleWithUpdates.StateUpdates) < 1 {
 			t.Fatalf("expected %+v to have state updates field not empty", gotRuleWithUpdates.StateUpdates)
 		}
@@ -171,7 +171,7 @@ func TestHandler(t *testing.T) {
 	check := func(url string, expGroups, expRules int) {
 		t.Helper()
 		lr := listGroupsResponse{}
-		getResp(ts.URL+url, &lr, 200)
+		getResp(t, ts.URL+url, &lr, 200)
 		if length := len(lr.Data.Groups); length != expGroups {
 			t.Errorf("expected %d groups got %d", expGroups, length)
 		}
@@ -210,7 +210,7 @@ func TestHandler(t *testing.T) {
 	t.Run("/api/v1/rules&exclude_alerts=true", func(t *testing.T) {
 		// check if response returns active alerts by default
 		lr := listGroupsResponse{}
-		getResp(ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml", &lr, 200)
+		getResp(t, ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml", &lr, 200)
 		activeAlerts := 0
 		for _, gr := range lr.Data.Groups {
 			for _, r := range gr.Rules {
@@ -223,7 +223,7 @@ func TestHandler(t *testing.T) {
 		// disable returning alerts via param
 		lr = listGroupsResponse{}
-		getResp(ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml&exclude_alerts=true", &lr, 200)
+		getResp(t, ts.URL+"/api/v1/rules?rule_group[]=group&file[]=rules.yaml&exclude_alerts=true", &lr, 200)
 		activeAlerts = 0
 		for _, gr := range lr.Data.Groups {
 			for _, r := range gr.Rules {
@@ -241,7 +241,7 @@ func TestEmptyResponse(t *testing.T) {
 	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { rhWithNoGroups.handler(w, r) }))
 	defer ts.Close()
-	getResp := func(url string, to interface{}, code int) {
+	getResp := func(t *testing.T, url string, to interface{}, code int) {
 		t.Helper()
 		resp, err := http.Get(url)
 		if err != nil {
@@ -264,13 +264,13 @@ func TestEmptyResponse(t *testing.T) {
 	t.Run("no groups /api/v1/alerts", func(t *testing.T) {
 		lr := listAlertsResponse{}
-		getResp(ts.URL+"/api/v1/alerts", &lr, 200)
+		getResp(t, ts.URL+"/api/v1/alerts", &lr, 200)
 		if lr.Data.Alerts == nil {
 			t.Errorf("expected /api/v1/alerts response to have non-nil data")
 		}
 		lr = listAlertsResponse{}
-		getResp(ts.URL+"/vmalert/api/v1/alerts", &lr, 200)
+		getResp(t, ts.URL+"/vmalert/api/v1/alerts", &lr, 200)
 		if lr.Data.Alerts == nil {
 			t.Errorf("expected /api/v1/alerts response to have non-nil data")
 		}
@@ -278,13 +278,13 @@ func TestEmptyResponse(t *testing.T) {
 	t.Run("no groups /api/v1/rules", func(t *testing.T) {
 		lr := listGroupsResponse{}
-		getResp(ts.URL+"/api/v1/rules", &lr, 200)
+		getResp(t, ts.URL+"/api/v1/rules", &lr, 200)
 		if lr.Data.Groups == nil {
 			t.Errorf("expected /api/v1/rules response to have non-nil data")
 		}
 		lr = listGroupsResponse{}
-		getResp(ts.URL+"/vmalert/api/v1/rules", &lr, 200)
+		getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200)
 		if lr.Data.Groups == nil {
 			t.Errorf("expected /api/v1/rules response to have non-nil data")
 		}
@@ -295,13 +295,13 @@ func TestEmptyResponse(t *testing.T) {
 	t.Run("empty group /api/v1/rules", func(t *testing.T) {
 		lr := listGroupsResponse{}
-		getResp(ts.URL+"/api/v1/rules", &lr, 200)
+		getResp(t, ts.URL+"/api/v1/rules", &lr, 200)
 		if lr.Data.Groups == nil {
 			t.Fatalf("expected /api/v1/rules response to have non-nil data")
 		}
 		lr = listGroupsResponse{}
-		getResp(ts.URL+"/vmalert/api/v1/rules", &lr, 200)
+		getResp(t, ts.URL+"/vmalert/api/v1/rules", &lr, 200)
 		if lr.Data.Groups == nil {
 			t.Fatalf("expected /api/v1/rules response to have non-nil data")
 		}
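
Besides blanking unused parameters, the vmalert API tests above thread t *testing.T through the getResp helper explicitly instead of capturing the outer t, so each t.Run subtest reports failures through its own *testing.T. A minimal sketch of that pattern (a hypothetical test, not from the repository):

	package example

	import "testing"

	// checkSum fails whichever *testing.T it is handed, so a failure
	// inside a subtest is attributed to that subtest.
	func checkSum(t *testing.T, got, want int) {
		t.Helper() // point failures at the caller's line, not this helper
		if got != want {
			t.Errorf("got %d; want %d", got, want)
		}
	}

	func TestSketch(t *testing.T) {
		t.Run("case-1", func(t *testing.T) {
			checkSum(t, 1+1, 2)
		})
		t.Run("case-2", func(t *testing.T) {
			checkSum(t, 2*2, 4)
		})
	}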

@@ -373,7 +373,7 @@ func main() {
 		return cli.Exit(fmt.Errorf("cannot open exported block at path=%q err=%w", blockPath, err), 1)
 	}
 	var blocksCount atomic.Uint64
-	if err := stream.Parse(f, isBlockGzipped, func(block *stream.Block) error {
+	if err := stream.Parse(f, isBlockGzipped, func(_ *stream.Block) error {
 		blocksCount.Add(1)
 		return nil
 	}); err != nil {

@@ -54,7 +54,7 @@ func CheckStreamAggrConfig() error {
 	if *streamAggrConfig == "" {
 		return nil
 	}
-	pushNoop := func(tss []prompbmarshal.TimeSeries) {}
+	pushNoop := func(_ []prompbmarshal.TimeSeries) {}
 	opts := &streamaggr.Options{
 		DedupInterval: *streamAggrDedupInterval,
 		DropInputLabels: *streamAggrDropInputLabels,

@@ -101,7 +101,7 @@ func Init() {
 	if len(*opentsdbHTTPListenAddr) > 0 {
 		opentsdbhttpServer = opentsdbhttpserver.MustStart(*opentsdbHTTPListenAddr, *opentsdbHTTPUseProxyProtocol, opentsdbhttp.InsertHandler)
 	}
-	promscrape.Init(func(at *auth.Token, wr *prompbmarshal.WriteRequest) {
+	promscrape.Init(func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
 		prompush.Push(wr)
 	})
 }

@@ -160,7 +160,7 @@ func newNextSeriesForSearchQuery(ec *evalConfig, sq *storage.SearchQuery, expr g
 	seriesCh := make(chan *series, cgroup.AvailableCPUs())
 	errCh := make(chan error, 1)
 	go func() {
-		err := rss.RunParallel(nil, func(rs *netstorage.Result, workerID uint) error {
+		err := rss.RunParallel(nil, func(rs *netstorage.Result, _ uint) error {
 			nameWithTags := getCanonicalPath(&rs.MetricName)
 			tags := unmarshalTags(nameWithTags)
 			s := &series{

@@ -405,7 +405,7 @@ func aggregateSeriesWithWildcards(ec *evalConfig, expr graphiteql.Expr, nextSeri
 	for _, pos := range positions {
 		positionsMap[pos] = struct{}{}
 	}
-	keyFunc := func(name string, tags map[string]string) string {
+	keyFunc := func(name string, _ map[string]string) string {
 		parts := strings.Split(getPathFromName(name), ".")
 		dstParts := parts[:0]
 		for i, part := range parts {
@@ -1881,7 +1881,7 @@ func transformGroupByTags(ec *evalConfig, fe *graphiteql.FuncExpr) (nextSeriesFu
 	if err != nil {
 		return nil, err
 	}
-	keyFunc := func(name string, tags map[string]string) string {
+	keyFunc := func(_ string, tags map[string]string) string {
 		return formatKeyFromTags(tags, tagKeys, callback)
 	}
 	return groupByKeyFunc(ec, fe, nextSeries, callback, keyFunc)

@@ -251,7 +251,7 @@ func ExportNativeHandler(startTime time.Time, w http.ResponseWriter, r *http.Req
 	_, _ = bw.Write(trBuf)
 	// Marshal native blocks.
-	err = netstorage.ExportBlocks(nil, sq, cp.deadline, func(mn *storage.MetricName, b *storage.Block, tr storage.TimeRange, workerID uint) error {
+	err = netstorage.ExportBlocks(nil, sq, cp.deadline, func(mn *storage.MetricName, b *storage.Block, _ storage.TimeRange, workerID uint) error {
 		if err := bw.Error(); err != nil {
 			return err
 		}
@@ -1238,7 +1238,7 @@ func (sw *scalableWriter) maybeFlushBuffer(bb *bytesutil.ByteBuffer) error {
 }
 func (sw *scalableWriter) flush() error {
-	sw.m.Range(func(k, v interface{}) bool {
+	sw.m.Range(func(_, v interface{}) bool {
 		bb := v.(*bytesutil.ByteBuffer)
 		_, err := sw.bw.Write(bb.B)
 		return err == nil

@@ -76,7 +76,7 @@ func newAggrFunc(afe func(tss []*timeseries) []*timeseries) aggrFunc {
 		if err != nil {
 			return nil, err
 		}
-		return aggrFuncExt(func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries {
+		return aggrFuncExt(func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 			return afe(tss)
 		}, tss, &afa.ae.Modifier, afa.ae.Limit, false)
 	}
@@ -158,7 +158,7 @@ func aggrFuncAny(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		return tss[:1]
 	}
 	limit := afa.ae.Limit
@@ -467,7 +467,7 @@ func aggrFuncShare(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		for i := range tss[0].Values {
 			// Calculate sum for non-negative points at position i.
 			var sum float64
@@ -498,7 +498,7 @@ func aggrFuncZScore(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		for i := range tss[0].Values {
 			// Calculate avg and stddev for tss points at position i.
 			// See `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation
@@ -594,7 +594,7 @@ func aggrFuncCountValues(afa *aggrFuncArg) ([]*timeseries, error) {
 		// Do nothing
 	}
-	afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) ([]*timeseries, error) {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) ([]*timeseries, error) {
 		m := make(map[float64]*timeseries)
 		for _, ts := range tss {
 			for i, v := range ts.Values {
@@ -656,7 +656,7 @@ func newAggrFuncTopK(isReverse bool) aggrFunc {
 		if err != nil {
 			return nil, err
 		}
-		afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries {
+		afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 			for n := range tss[0].Values {
 				lessFunc := lessWithNaNs
 				if isReverse {
@@ -960,7 +960,7 @@ func aggrFuncOutliersIQR(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err := expectTransformArgsNum(args, 1); err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		// Calculate lower and upper bounds for interquartile range per each point across tss
 		// according to Outliers section at https://en.wikipedia.org/wiki/Interquartile_range
 		lower, upper := getPerPointIQRBounds(tss)
@@ -1016,7 +1016,7 @@ func aggrFuncOutliersMAD(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		// Calculate medians for each point across tss.
 		medians := getPerPointMedians(tss)
 		// Calculate MAD values multiplied by tolerance for each point across tss.
@@ -1052,7 +1052,7 @@ func aggrFuncOutliersK(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		// Calculate medians for each point across tss.
 		medians := getPerPointMedians(tss)
 		// Return topK time series with the highest variance from median.
@@ -1123,7 +1123,7 @@ func aggrFuncLimitK(afa *aggrFuncArg) ([]*timeseries, error) {
 	if limit < 0 {
 		limit = 0
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		// Sort series by metricName hash in order to get consistent set of output series
 		// across multiple calls to limitk() function.
 		// Sort series by hash in order to guarantee uniform selection across series.
@@ -1187,7 +1187,7 @@ func aggrFuncQuantiles(afa *aggrFuncArg) ([]*timeseries, error) {
 		phis[i] = phisLocal[0]
 	}
 	argOrig := args[len(args)-1]
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		tssDst := make([]*timeseries, len(phiArgs))
 		for j := range tssDst {
 			ts := &timeseries{}
@@ -1244,7 +1244,7 @@ func aggrFuncMedian(afa *aggrFuncArg) ([]*timeseries, error) {
 }
 func newAggrQuantileFunc(phis []float64) func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
-	return func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	return func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		dst := tss[0]
 		a := getFloat64s()
 		values := a.A

@@ -74,7 +74,7 @@ func newBinaryOpCmpFunc(cf func(left, right float64) bool) binaryOpFunc {
 }
 func newBinaryOpArithFunc(af func(left, right float64) float64) binaryOpFunc {
-	afe := func(left, right float64, isBool bool) float64 {
+	afe := func(left, right float64, _ bool) float64 {
 		return af(left, right)
 	}
 	return newBinaryOpFunc(afe)

@@ -210,11 +210,13 @@ func TestExecSuccess(t *testing.T) {
 		f(q, resultExpected)
 	})
 	t.Run("scalar-string-nonnum", func(t *testing.T) {
+		t.Parallel()
 		q := `scalar("fooobar")`
 		resultExpected := []netstorage.Result{}
 		f(q, resultExpected)
 	})
 	t.Run("scalar-string-num", func(t *testing.T) {
+		t.Parallel()
 		q := `scalar("-12.34")`
 		r := netstorage.Result{
 			MetricName: metricNameExpected,
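
This hunk also adds t.Parallel() to the two subtests, which is why the header grows from 11 to 13 lines. A parallel subtest runs concurrently with its sibling parallel subtests, shortening the overall run when the cases are independent. A small sketch, assuming independent cases:

	package example

	import "testing"

	func TestParallelSketch(t *testing.T) {
		queries := []string{`scalar("fooobar")`, `scalar("-12.34")`}
		for _, q := range queries {
			q := q // capture the loop variable (needed before Go 1.22)
			t.Run(q, func(t *testing.T) {
				t.Parallel() // may run concurrently with sibling subtests
				_ = q        // a real test would evaluate q here
			})
		}
	}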

@@ -371,10 +371,10 @@ func getRollupTag(expr metricsql.Expr) (string, error) {
 func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start, end, step int64, maxPointsPerSeries int,
 	window, lookbackDelta int64, sharedTimestamps []int64) (
 	func(values []float64, timestamps []int64), []*rollupConfig, error) {
-	preFunc := func(values []float64, timestamps []int64) {}
+	preFunc := func(_ []float64, _ []int64) {}
 	funcName = strings.ToLower(funcName)
 	if rollupFuncsRemoveCounterResets[funcName] {
-		preFunc = func(values []float64, timestamps []int64) {
+		preFunc = func(values []float64, _ []int64) {
 			removeCounterResets(values)
 		}
 	}
@@ -486,7 +486,7 @@ func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start
 	for _, aggrFuncName := range aggrFuncNames {
 		if rollupFuncsRemoveCounterResets[aggrFuncName] {
 			// There is no need to save the previous preFunc, since it is either empty or the same.
-			preFunc = func(values []float64, timestamps []int64) {
+			preFunc = func(values []float64, _ []int64) {
 				removeCounterResets(values)
 			}
 		}
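
Note that preFunc has the declared type func(values []float64, timestamps []int64), so the unused timestamps parameter cannot simply be removed; only its name changes. Parameter names are not part of a Go function type, which is why the blanked version still satisfies the declared type. A compact illustration with hypothetical names:

	package main

	import "fmt"

	func main() {
		// The variable's type fixes the parameter list of anything assigned to it.
		var preFunc func(values []float64, timestamps []int64)

		// Blanking the unused parameter changes nothing for callers:
		// names are irrelevant to the function type.
		preFunc = func(values []float64, _ []int64) {
			fmt.Println("processing", len(values), "values")
		}

		preFunc([]float64{1, 2, 3}, []int64{10, 20, 30})
	}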

@@ -10,13 +10,13 @@ import (
 )
 func TestRollupResultCacheInitStop(t *testing.T) {
-	t.Run("inmemory", func(t *testing.T) {
+	t.Run("inmemory", func(_ *testing.T) {
 		for i := 0; i < 5; i++ {
 			InitRollupResultCache("")
 			StopRollupResultCache()
 		}
 	})
-	t.Run("file-based", func(t *testing.T) {
+	t.Run("file-based", func(_ *testing.T) {
 		cacheFilePath := "test-rollup-result-cache"
 		for i := 0; i < 3; i++ {
 			InitRollupResultCache(cacheFilePath)
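
Here the subtest bodies never use their *testing.T, so the parameter itself is blanked; the closure still matches the func(*testing.T) signature that t.Run requires. The shape of the fix on a toy subtest (illustrative only):

	package example

	import "testing"

	func TestBlankSubtestParam(t *testing.T) {
		// The body only exercises init/stop paths and never calls into
		// the subtest's T, so revive would flag a named parameter.
		t.Run("smoke", func(_ *testing.T) {
			for i := 0; i < 3; i++ {
				_ = i * i // stand-in for setup/teardown-only work
			}
		})
	}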

@@ -918,7 +918,7 @@ func transformHistogramQuantile(tfa *transformFuncArg) ([]*timeseries, error) {
 	m := groupLeTimeseries(tss)
 	// Calculate quantile for each group in m
-	lastNonInf := func(i int, xss []leTimeseries) float64 {
+	lastNonInf := func(_ int, xss []leTimeseries) float64 {
 		for len(xss) > 0 {
 			xsLast := xss[len(xss)-1]
 			if !math.IsInf(xsLast.le, 0) {

@@ -90,7 +90,7 @@ type RequestHandler func(w http.ResponseWriter, r *http.Request) bool
 // See https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt
 func Serve(addrs []string, useProxyProtocol *flagutil.ArrayBool, rh RequestHandler) {
 	if rh == nil {
-		rh = func(w http.ResponseWriter, r *http.Request) bool {
+		rh = func(_ http.ResponseWriter, _ *http.Request) bool {
 			return false
 		}
 	}
@@ -152,7 +152,7 @@ func serveWithListener(addr string, ln net.Listener, rh RequestHandler) {
 		ErrorLog: logger.StdErrorLogger(),
 	}
 	if *connTimeout > 0 {
-		s.s.ConnContext = func(ctx context.Context, c net.Conn) context.Context {
+		s.s.ConnContext = func(ctx context.Context, _ net.Conn) context.Context {
 			timeoutSec := connTimeout.Seconds()
 			// Add a jitter for connection timeout in order to prevent Thundering herd problem
 			// when all the connections are established at the same time.

@@ -211,11 +211,11 @@ func TestFilterBitmap(t *testing.T) {
 	})
 	// Clear all the bits
-	bm.forEachSetBit(func(idx int) bool {
+	bm.forEachSetBit(func(_ int) bool {
 		return false
 	})
 	bitsCount := 0
-	bm.forEachSetBit(func(idx int) bool {
+	bm.forEachSetBit(func(_ int) bool {
 		bitsCount++
 		return true
 	})
@@ -9226,7 +9226,7 @@ func testFilterMatchForStorage(t *testing.T, s *Storage, tenantID TenantID, f fi
 		resultColumnNames: []string{resultColumnName},
 	}
 	workersCount := 3
-	s.search(workersCount, so, nil, func(workerID uint, br *blockResult) {
+	s.search(workersCount, so, nil, func(_ uint, br *blockResult) {
 		// Verify tenantID
 		if !br.streamID.tenantID.equal(&tenantID) {
 			t.Fatalf("unexpected tenantID in blockResult; got %s; want %s", &br.streamID.tenantID, &tenantID)

@@ -76,175 +76,174 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 			}
 		}
 	})
-	t.Run("missing-job", func(t *testing.T) {
-		f(`{job="non-existing-job",instance="instance-0"}`, nil)
-	})
-	t.Run("missing-job-re", func(t *testing.T) {
-		f(`{job=~"non-existing-job|",instance="instance-0"}`, nil)
-	})
-	t.Run("missing-job-negative-re", func(t *testing.T) {
-		f(`{job!~"job.+",instance="instance-0"}`, nil)
-	})
-	t.Run("empty-job", func(t *testing.T) {
-		f(`{job="",instance="instance-0"}`, nil)
-	})
-	t.Run("missing-instance", func(t *testing.T) {
-		f(`{job="job-0",instance="non-existing-instance"}`, nil)
-	})
-	t.Run("missing-instance-re", func(t *testing.T) {
-		f(`{job="job-0",instance=~"non-existing-instance|"}`, nil)
-	})
-	t.Run("missing-instance-negative-re", func(t *testing.T) {
-		f(`{job="job-0",instance!~"instance.+"}`, nil)
-	})
-	t.Run("empty-instance", func(t *testing.T) {
-		f(`{job="job-0",instance=""}`, nil)
-	})
-	t.Run("non-existing-tag", func(t *testing.T) {
-		f(`{job="job-0",instance="instance-0",non_existing_tag="foobar"}`, nil)
-	})
-	t.Run("non-existing-non-empty-tag", func(t *testing.T) {
-		f(`{job="job-0",instance="instance-0",non_existing_tag!=""}`, nil)
-	})
-	t.Run("non-existing-tag-re", func(t *testing.T) {
-		f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo.+"}`, nil)
-	})
-	t.Run("non-existing-non-empty-tag-re", func(t *testing.T) {
-		f(`{job="job-0",instance="instance-0",non_existing_tag!~""}`, nil)
-	})
-	t.Run("match-job-instance", func(t *testing.T) {
-		sid, _ := getStreamIDForTags(map[string]string{
-			"instance": "instance-0",
-			"job": "job-0",
-		})
-		f(`{job="job-0",instance="instance-0"}`, []streamID{sid})
-	})
-	t.Run("match-non-existing-tag", func(t *testing.T) {
-		sid, _ := getStreamIDForTags(map[string]string{
-			"instance": "instance-0",
-			"job": "job-0",
-		})
-		f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo|"}`, []streamID{sid})
-	})
-	t.Run("match-job", func(t *testing.T) {
-		var streamIDs []streamID
-		for i := 0; i < instancesCount; i++ {
-			sid, _ := getStreamIDForTags(map[string]string{
-				"instance": fmt.Sprintf("instance-%d", i),
-				"job": "job-0",
-			})
-			streamIDs = append(streamIDs, sid)
-		}
-		f(`{job="job-0"}`, streamIDs)
-	})
-	t.Run("match-instance", func(t *testing.T) {
-		var streamIDs []streamID
-		for i := 0; i < jobsCount; i++ {
-			sid, _ := getStreamIDForTags(map[string]string{
-				"instance": "instance-1",
-				"job": fmt.Sprintf("job-%d", i),
-			})
-			streamIDs = append(streamIDs, sid)
-		}
-		f(`{instance="instance-1"}`, streamIDs)
-	})
-	t.Run("match-re", func(t *testing.T) {
-		var streamIDs []streamID
-		for _, instanceID := range []int{3, 1} {
-			for _, jobID := range []int{0, 2} {
-				sid, _ := getStreamIDForTags(map[string]string{
-					"instance": fmt.Sprintf("instance-%d", instanceID),
-					"job": fmt.Sprintf("job-%d", jobID),
-				})
-				streamIDs = append(streamIDs, sid)
-			}
-		}
-		f(`{job=~"job-(0|2)",instance=~"instance-[13]"}`, streamIDs)
-	})
-	t.Run("match-re-empty-match", func(t *testing.T) {
-		var streamIDs []streamID
-		for _, instanceID := range []int{3, 1} {
-			for _, jobID := range []int{0, 2} {
-				sid, _ := getStreamIDForTags(map[string]string{
-					"instance": fmt.Sprintf("instance-%d", instanceID),
-					"job": fmt.Sprintf("job-%d", jobID),
-				})
-				streamIDs = append(streamIDs, sid)
-			}
-		}
-		f(`{job=~"job-(0|2)|",instance=~"instance-[13]"}`, streamIDs)
-	})
-	t.Run("match-negative-re", func(t *testing.T) {
-		var instanceIDs []int
-		for i := 0; i < instancesCount; i++ {
-			if i != 0 && i != 1 {
-				instanceIDs = append(instanceIDs, i)
-			}
-		}
-		var jobIDs []int
-		for i := 0; i < jobsCount; i++ {
-			if i > 2 {
-				jobIDs = append(jobIDs, i)
-			}
-		}
-		var streamIDs []streamID
-		for _, instanceID := range instanceIDs {
-			for _, jobID := range jobIDs {
-				sid, _ := getStreamIDForTags(map[string]string{
-					"instance": fmt.Sprintf("instance-%d", instanceID),
-					"job": fmt.Sprintf("job-%d", jobID),
-				})
-				streamIDs = append(streamIDs, sid)
-			}
-		}
-		f(`{job!~"job-[0-2]",instance!~"instance-(0|1)"}`, streamIDs)
-	})
-	t.Run("match-negative-re-empty-match", func(t *testing.T) {
-		var instanceIDs []int
-		for i := 0; i < instancesCount; i++ {
-			if i != 0 && i != 1 {
-				instanceIDs = append(instanceIDs, i)
-			}
-		}
-		var jobIDs []int
-		for i := 0; i < jobsCount; i++ {
-			if i > 2 {
-				jobIDs = append(jobIDs, i)
-			}
-		}
-		var streamIDs []streamID
-		for _, instanceID := range instanceIDs {
-			for _, jobID := range jobIDs {
-				sid, _ := getStreamIDForTags(map[string]string{
-					"instance": fmt.Sprintf("instance-%d", instanceID),
-					"job": fmt.Sprintf("job-%d", jobID),
-				})
-				streamIDs = append(streamIDs, sid)
-			}
-		}
-		f(`{job!~"job-[0-2]",instance!~"instance-(0|1)|"}`, streamIDs)
-	})
-	t.Run("match-negative-job", func(t *testing.T) {
-		instanceIDs := []int{2}
-		var jobIDs []int
-		for i := 0; i < jobsCount; i++ {
-			if i != 1 {
-				jobIDs = append(jobIDs, i)
-			}
-		}
-		var streamIDs []streamID
-		for _, instanceID := range instanceIDs {
-			for _, jobID := range jobIDs {
-				sid, _ := getStreamIDForTags(map[string]string{
-					"instance": fmt.Sprintf("instance-%d", instanceID),
-					"job": fmt.Sprintf("job-%d", jobID),
-				})
-				streamIDs = append(streamIDs, sid)
-			}
-		}
-		f(`{instance="instance-2",job!="job-1"}`, streamIDs)
-	})
+	// missing-job
+	f(`{job="non-existing-job",instance="instance-0"}`, nil)
+
+	// missing-job-re
+	f(`{job=~"non-existing-job|",instance="instance-0"}`, nil)
+
+	// missing-job-negative-re
+	f(`{job!~"job.+",instance="instance-0"}`, nil)
+
+	// empty-job
+	f(`{job="",instance="instance-0"}`, nil)
+
+	// missing-instance
+	f(`{job="job-0",instance="non-existing-instance"}`, nil)
+
+	// missing-instance-re
+	f(`{job="job-0",instance=~"non-existing-instance|"}`, nil)
+
+	// missing-instance-negative-re
+	f(`{job="job-0",instance!~"instance.+"}`, nil)
+
+	// empty-instance
+	f(`{job="job-0",instance=""}`, nil)
+
+	// non-existing-tag
+	f(`{job="job-0",instance="instance-0",non_existing_tag="foobar"}`, nil)
+
+	// non-existing-non-empty-tag
+	f(`{job="job-0",instance="instance-0",non_existing_tag!=""}`, nil)
+
+	// non-existing-tag-re
+	f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo.+"}`, nil)
+
+	//non-existing-non-empty-tag-re
+	f(`{job="job-0",instance="instance-0",non_existing_tag!~""}`, nil)
+
+	// match-job-instance
+	sid, _ := getStreamIDForTags(map[string]string{
+		"instance": "instance-0",
+		"job": "job-0",
+	})
+	f(`{job="job-0",instance="instance-0"}`, []streamID{sid})
+
+	// match-non-existing-tag
+	sid, _ = getStreamIDForTags(map[string]string{
+		"instance": "instance-0",
+		"job": "job-0",
+	})
+	f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo|"}`, []streamID{sid})
+
+	// match-job
+	var streamIDs []streamID
+	for i := 0; i < instancesCount; i++ {
+		sid, _ := getStreamIDForTags(map[string]string{
+			"instance": fmt.Sprintf("instance-%d", i),
+			"job": "job-0",
+		})
+		streamIDs = append(streamIDs, sid)
+	}
+	f(`{job="job-0"}`, streamIDs)
+
+	// match-instance
+	streamIDs = nil
+	for i := 0; i < jobsCount; i++ {
+		sid, _ := getStreamIDForTags(map[string]string{
+			"instance": "instance-1",
+			"job": fmt.Sprintf("job-%d", i),
+		})
+		streamIDs = append(streamIDs, sid)
+	}
+	f(`{instance="instance-1"}`, streamIDs)
+
+	// match-re
+	streamIDs = nil
+	for _, instanceID := range []int{3, 1} {
+		for _, jobID := range []int{0, 2} {
+			sid, _ := getStreamIDForTags(map[string]string{
+				"instance": fmt.Sprintf("instance-%d", instanceID),
+				"job": fmt.Sprintf("job-%d", jobID),
+			})
+			streamIDs = append(streamIDs, sid)
+		}
+	}
+	f(`{job=~"job-(0|2)",instance=~"instance-[13]"}`, streamIDs)
+
+	// match-re-empty-match
+	streamIDs = nil
+	for _, instanceID := range []int{3, 1} {
+		for _, jobID := range []int{0, 2} {
+			sid, _ := getStreamIDForTags(map[string]string{
+				"instance": fmt.Sprintf("instance-%d", instanceID),
+				"job": fmt.Sprintf("job-%d", jobID),
+			})
+			streamIDs = append(streamIDs, sid)
+		}
+	}
+	f(`{job=~"job-(0|2)|",instance=~"instance-[13]"}`, streamIDs)
+
+	// match-negative-re
+	var instanceIDs []int
+	for i := 0; i < instancesCount; i++ {
+		if i != 0 && i != 1 {
+			instanceIDs = append(instanceIDs, i)
+		}
+	}
+	var jobIDs []int
+	for i := 0; i < jobsCount; i++ {
+		if i > 2 {
+			jobIDs = append(jobIDs, i)
+		}
+	}
+	streamIDs = nil
+	for _, instanceID := range instanceIDs {
+		for _, jobID := range jobIDs {
+			sid, _ := getStreamIDForTags(map[string]string{
+				"instance": fmt.Sprintf("instance-%d", instanceID),
+				"job": fmt.Sprintf("job-%d", jobID),
+			})
+			streamIDs = append(streamIDs, sid)
+		}
+	}
+	f(`{job!~"job-[0-2]",instance!~"instance-(0|1)"}`, streamIDs)
+
+	// match-negative-re-empty-match
+	instanceIDs = nil
+	for i := 0; i < instancesCount; i++ {
+		if i != 0 && i != 1 {
+			instanceIDs = append(instanceIDs, i)
+		}
+	}
+	jobIDs = nil
+	for i := 0; i < jobsCount; i++ {
+		if i > 2 {
+			jobIDs = append(jobIDs, i)
+		}
+	}
+	streamIDs = nil
+	for _, instanceID := range instanceIDs {
+		for _, jobID := range jobIDs {
+			sid, _ := getStreamIDForTags(map[string]string{
+				"instance": fmt.Sprintf("instance-%d", instanceID),
+				"job": fmt.Sprintf("job-%d", jobID),
+			})
+			streamIDs = append(streamIDs, sid)
+		}
+	}
+	f(`{job!~"job-[0-2]",instance!~"instance-(0|1)|"}`, streamIDs)
+
+	// match-negative-job
+	instanceIDs = []int{2}
+	jobIDs = nil
+	for i := 0; i < jobsCount; i++ {
+		if i != 1 {
+			jobIDs = append(jobIDs, i)
+		}
+	}
+	streamIDs = nil
+	for _, instanceID := range instanceIDs {
+		for _, jobID := range jobIDs {
+			sid, _ := getStreamIDForTags(map[string]string{
+				"instance": fmt.Sprintf("instance-%d", instanceID),
+				"job": fmt.Sprintf("job-%d", jobID),
+			})
+			streamIDs = append(streamIDs, sid)
+		}
+	}
+	f(`{instance="instance-2",job!="job-1"}`, streamIDs)
 	mustCloseIndexdb(idb)
 	fs.MustRemoveAll(path)
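
This hunk goes further than renaming: the t.Run wrappers whose closures no longer used t are dropped entirely, and the old subtest names survive as comments over one linear sequence of checks. Flattening also lets the cases share and reset the streamIDs, instanceIDs and jobIDs slices instead of redeclaring them in every closure. The shape of that refactor on a toy example (not the project's code):

	package example

	import "testing"

	func TestFlattenedCases(t *testing.T) {
		f := func(query string, wantMatches int) {
			t.Helper()
			if wantMatches < 0 {
				t.Fatalf("bad expectation for %s", query)
			}
		}

		// missing-job
		f(`{job="non-existing-job"}`, 0)

		// match-job: later cases can reuse variables declared earlier,
		// which separate subtest closures could not share.
		f(`{job="job-0"}`, 1)
	}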

@@ -51,7 +51,7 @@ func (s *Storage) RunQuery(tenantIDs []TenantID, q *Query, stopCh <-chan struct{
 		resultColumnNames: resultColumnNames,
 	}
 	workersCount := cgroup.AvailableCPUs()
-	s.search(workersCount, so, stopCh, func(workerID uint, br *blockResult) {
+	s.search(workersCount, so, stopCh, func(_ uint, br *blockResult) {
 		brs := getBlockRows()
 		cs := brs.cs

@ -78,25 +78,25 @@ func TestStorageRunQuery(t *testing.T) {
s.debugFlush() s.debugFlush()
// run tests on the storage data // run tests on the storage data
t.Run("missing-tenant", func(t *testing.T) { t.Run("missing-tenant", func(_ *testing.T) {
q := mustParseQuery(`"log message"`) q := mustParseQuery(`"log message"`)
tenantID := TenantID{ tenantID := TenantID{
AccountID: 0, AccountID: 0,
ProjectID: 0, ProjectID: 0,
} }
processBlock := func(columns []BlockColumn) { processBlock := func(_ []BlockColumn) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
tenantIDs := []TenantID{tenantID} tenantIDs := []TenantID{tenantID}
s.RunQuery(tenantIDs, q, nil, processBlock) s.RunQuery(tenantIDs, q, nil, processBlock)
}) })
t.Run("missing-message-text", func(t *testing.T) { t.Run("missing-message-text", func(_ *testing.T) {
q := mustParseQuery(`foobar`) q := mustParseQuery(`foobar`)
tenantID := TenantID{ tenantID := TenantID{
AccountID: 1, AccountID: 1,
ProjectID: 11, ProjectID: 11,
} }
processBlock := func(columns []BlockColumn) { processBlock := func(_ []BlockColumn) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
tenantIDs := []TenantID{tenantID} tenantIDs := []TenantID{tenantID}
@ -168,9 +168,9 @@ func TestStorageRunQuery(t *testing.T) {
t.Fatalf("unexpected number of matching rows; got %d; want %d", n, expectedRowsCount) t.Fatalf("unexpected number of matching rows; got %d; want %d", n, expectedRowsCount)
} }
}) })
t.Run("stream-filter-mismatch", func(t *testing.T) { t.Run("stream-filter-mismatch", func(_ *testing.T) {
q := mustParseQuery(`_stream:{job="foobar",instance=~"host-.+:2345"} log`) q := mustParseQuery(`_stream:{job="foobar",instance=~"host-.+:2345"} log`)
processBlock := func(columns []BlockColumn) { processBlock := func(_ []BlockColumn) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
s.RunQuery(allTenantIDs, q, nil, processBlock) s.RunQuery(allTenantIDs, q, nil, processBlock)
@ -273,7 +273,7 @@ func TestStorageRunQuery(t *testing.T) {
t.Fatalf("unexpected number of rows; got %d; want %d", n, expectedRowsCount) t.Fatalf("unexpected number of rows; got %d; want %d", n, expectedRowsCount)
} }
}) })
t.Run("matching-stream-id-missing-time-range", func(t *testing.T) { t.Run("matching-stream-id-missing-time-range", func(_ *testing.T) {
minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9 minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9
maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9 maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9
q := mustParseQuery(fmt.Sprintf(`_stream:{job="foobar",instance="host-1:234"} _time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9)) q := mustParseQuery(fmt.Sprintf(`_stream:{job="foobar",instance="host-1:234"} _time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9))
@ -281,13 +281,13 @@ func TestStorageRunQuery(t *testing.T) {
AccountID: 1, AccountID: 1,
ProjectID: 11, ProjectID: 11,
} }
processBlock := func(columns []BlockColumn) { processBlock := func(_ []BlockColumn) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
tenantIDs := []TenantID{tenantID} tenantIDs := []TenantID{tenantID}
s.RunQuery(tenantIDs, q, nil, processBlock) s.RunQuery(tenantIDs, q, nil, processBlock)
}) })
t.Run("missing-time-range", func(t *testing.T) { t.Run("missing-time-range", func(_ *testing.T) {
minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9 minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9
maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9 maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9
q := mustParseQuery(fmt.Sprintf(`_time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9)) q := mustParseQuery(fmt.Sprintf(`_time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9))
@ -295,7 +295,7 @@ func TestStorageRunQuery(t *testing.T) {
AccountID: 1, AccountID: 1,
ProjectID: 11, ProjectID: 11,
} }
processBlock := func(columns []BlockColumn) { processBlock := func(_ []BlockColumn) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
tenantIDs := []TenantID{tenantID} tenantIDs := []TenantID{tenantID}
@ -392,7 +392,7 @@ func TestStorageSearch(t *testing.T) {
} }
} }
t.Run("missing-tenant-smaller-than-existing", func(t *testing.T) { t.Run("missing-tenant-smaller-than-existing", func(_ *testing.T) {
tenantID := TenantID{ tenantID := TenantID{
AccountID: 0, AccountID: 0,
ProjectID: 0, ProjectID: 0,
@ -405,12 +405,12 @@ func TestStorageSearch(t *testing.T) {
filter: f, filter: f,
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, _ *blockResult) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
s.search(workersCount, so, nil, processBlock) s.search(workersCount, so, nil, processBlock)
}) })
t.Run("missing-tenant-bigger-than-existing", func(t *testing.T) { t.Run("missing-tenant-bigger-than-existing", func(_ *testing.T) {
tenantID := TenantID{ tenantID := TenantID{
AccountID: tenantsCount + 1, AccountID: tenantsCount + 1,
ProjectID: 0, ProjectID: 0,
@ -423,12 +423,12 @@ func TestStorageSearch(t *testing.T) {
filter: f, filter: f,
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, _ *blockResult) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
s.search(workersCount, so, nil, processBlock) s.search(workersCount, so, nil, processBlock)
}) })
t.Run("missing-tenant-middle", func(t *testing.T) { t.Run("missing-tenant-middle", func(_ *testing.T) {
tenantID := TenantID{ tenantID := TenantID{
AccountID: 1, AccountID: 1,
ProjectID: 0, ProjectID: 0,
@ -441,7 +441,7 @@ func TestStorageSearch(t *testing.T) {
filter: f, filter: f,
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, _ *blockResult) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
s.search(workersCount, so, nil, processBlock) s.search(workersCount, so, nil, processBlock)
@ -461,7 +461,7 @@ func TestStorageSearch(t *testing.T) {
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
var rowsCount atomic.Uint32 var rowsCount atomic.Uint32
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, br *blockResult) {
if !br.streamID.tenantID.equal(&tenantID) { if !br.streamID.tenantID.equal(&tenantID) {
panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
} }
@ -485,7 +485,7 @@ func TestStorageSearch(t *testing.T) {
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
var rowsCount atomic.Uint32 var rowsCount atomic.Uint32
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, br *blockResult) {
rowsCount.Add(uint32(br.RowsCount())) rowsCount.Add(uint32(br.RowsCount()))
} }
s.search(workersCount, so, nil, processBlock) s.search(workersCount, so, nil, processBlock)
@ -495,7 +495,7 @@ func TestStorageSearch(t *testing.T) {
t.Fatalf("unexpected number of matching rows; got %d; want %d", n, expectedRowsCount) t.Fatalf("unexpected number of matching rows; got %d; want %d", n, expectedRowsCount)
} }
}) })
t.Run("stream-filter-mismatch", func(t *testing.T) { t.Run("stream-filter-mismatch", func(_ *testing.T) {
sf := mustNewStreamFilter(`{job="foobar",instance=~"host-.+:2345"}`) sf := mustNewStreamFilter(`{job="foobar",instance=~"host-.+:2345"}`)
minTimestamp := baseTimestamp minTimestamp := baseTimestamp
maxTimestamp := baseTimestamp + rowsPerBlock*1e9 + blocksPerStream maxTimestamp := baseTimestamp + rowsPerBlock*1e9 + blocksPerStream
@ -505,7 +505,7 @@ func TestStorageSearch(t *testing.T) {
filter: f, filter: f,
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, _ *blockResult) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
s.search(workersCount, so, nil, processBlock) s.search(workersCount, so, nil, processBlock)
@ -526,7 +526,7 @@ func TestStorageSearch(t *testing.T) {
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
var rowsCount atomic.Uint32 var rowsCount atomic.Uint32
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, br *blockResult) {
if !br.streamID.tenantID.equal(&tenantID) { if !br.streamID.tenantID.equal(&tenantID) {
panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
} }
@ -555,7 +555,7 @@ func TestStorageSearch(t *testing.T) {
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
var rowsCount atomic.Uint32 var rowsCount atomic.Uint32
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, br *blockResult) {
if !br.streamID.tenantID.equal(&tenantID) { if !br.streamID.tenantID.equal(&tenantID) {
panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
} }
@ -592,7 +592,7 @@ func TestStorageSearch(t *testing.T) {
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
var rowsCount atomic.Uint32 var rowsCount atomic.Uint32
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, br *blockResult) {
if !br.streamID.tenantID.equal(&tenantID) { if !br.streamID.tenantID.equal(&tenantID) {
panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID)) panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
} }
@ -620,7 +620,7 @@ func TestStorageSearch(t *testing.T) {
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
var rowsCount atomic.Uint32 var rowsCount atomic.Uint32
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, br *blockResult) {
rowsCount.Add(uint32(br.RowsCount())) rowsCount.Add(uint32(br.RowsCount()))
} }
s.search(workersCount, so, nil, processBlock) s.search(workersCount, so, nil, processBlock)
@ -630,7 +630,7 @@ func TestStorageSearch(t *testing.T) {
t.Fatalf("unexpected number of rows; got %d; want %d", n, expectedRowsCount) t.Fatalf("unexpected number of rows; got %d; want %d", n, expectedRowsCount)
} }
}) })
t.Run("matching-stream-id-missing-time-range", func(t *testing.T) { t.Run("matching-stream-id-missing-time-range", func(_ *testing.T) {
sf := mustNewStreamFilter(`{job="foobar",instance="host-1:234"}`) sf := mustNewStreamFilter(`{job="foobar",instance="host-1:234"}`)
tenantID := TenantID{ tenantID := TenantID{
AccountID: 1, AccountID: 1,
@ -644,7 +644,7 @@ func TestStorageSearch(t *testing.T) {
filter: f, filter: f,
resultColumnNames: []string{"_msg"}, resultColumnNames: []string{"_msg"},
} }
processBlock := func(workerID uint, br *blockResult) { processBlock := func(_ uint, _ *blockResult) {
panic(fmt.Errorf("unexpected match")) panic(fmt.Errorf("unexpected match"))
} }
s.search(workersCount, so, nil, processBlock) s.search(workersCount, so, nil, processBlock)
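
For context on the pattern in the hunks above: revive's unused-parameter check fires when a named function parameter is never read, and the conventional fix is renaming it to the blank identifier `_` while leaving the signature untouched, since the callback type is fixed by the caller. A minimal self-contained sketch (the search/processBlock names mirror the code above, but the types here are illustrative, not the real storage API):

package main

import "fmt"

// search calls processBlock for every block that matches; the
// callback signature is fixed, so callers that only need the block
// data must still accept the worker ID.
func search(workersCount int, processBlock func(workerID uint, rows []string)) {
	for i := 0; i < workersCount; i++ {
		processBlock(uint(i), []string{"row-a", "row-b"})
	}
}

func main() {
	// Only rows is read, so workerID is blanked to satisfy revive.
	search(2, func(_ uint, rows []string) {
		fmt.Println(len(rows))
	})
}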

View file

@ -32,7 +32,7 @@ func GetServerTLSConfig(tlsCertFile, tlsKeyFile, tlsMinVersion string, tlsCipher
MinVersion: minVersion, MinVersion: minVersion,
// Do not set MaxVersion, since this has no sense from security PoV. // Do not set MaxVersion, since this has no sense from security PoV.
// This can only result in lower security level if improperly set. // This can only result in lower security level if improperly set.
GetCertificate: func(info *tls.ClientHelloInfo) (*tls.Certificate, error) { GetCertificate: func(_ *tls.ClientHelloInfo) (*tls.Certificate, error) {
certLock.Lock() certLock.Lock()
defer certLock.Unlock() defer certLock.Unlock()
if fasttime.UnixTimestamp() > certDeadline { if fasttime.UnixTimestamp() > certDeadline {
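
The same rename applies to standard-library callback types such as GetCertificate, whose signature requires a *tls.ClientHelloInfo argument even when certificate selection ignores it. A minimal sketch, assuming a certificate pair exists on disk (cert.pem/key.pem are placeholder paths):

package main

import (
	"crypto/tls"
	"log"
)

func main() {
	// cert.pem and key.pem are placeholder paths for this sketch.
	cert, err := tls.LoadX509KeyPair("cert.pem", "key.pem")
	if err != nil {
		log.Fatal(err)
	}
	cfg := &tls.Config{
		MinVersion: tls.VersionTLS12,
		// The ClientHelloInfo is ignored: the same certificate is
		// served regardless of SNI, so the parameter is blanked.
		GetCertificate: func(_ *tls.ClientHelloInfo) (*tls.Certificate, error) {
			return &cert, nil
		},
	}
	_ = cfg
}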

View file

@ -22,7 +22,7 @@ func TestQueueOpenClose(t *testing.T) {
} }
func TestQueueOpen(t *testing.T) { func TestQueueOpen(t *testing.T) {
t.Run("invalid-metainfo", func(t *testing.T) { t.Run("invalid-metainfo", func(_ *testing.T) {
path := "queue-open-invalid-metainfo" path := "queue-open-invalid-metainfo"
mustCreateDir(path) mustCreateDir(path)
mustCreateFile(filepath.Join(path, metainfoFilename), "foobarbaz") mustCreateFile(filepath.Join(path, metainfoFilename), "foobarbaz")
@ -30,7 +30,7 @@ func TestQueueOpen(t *testing.T) {
q.MustClose() q.MustClose()
mustDeleteDir(path) mustDeleteDir(path)
}) })
t.Run("junk-files-and-dirs", func(t *testing.T) { t.Run("junk-files-and-dirs", func(_ *testing.T) {
path := "queue-open-junk-files-and-dir" path := "queue-open-junk-files-and-dir"
mustCreateDir(path) mustCreateDir(path)
mustCreateEmptyMetainfo(path, "foobar") mustCreateEmptyMetainfo(path, "foobar")
@ -40,7 +40,7 @@ func TestQueueOpen(t *testing.T) {
q.MustClose() q.MustClose()
mustDeleteDir(path) mustDeleteDir(path)
}) })
t.Run("invalid-chunk-offset", func(t *testing.T) { t.Run("invalid-chunk-offset", func(_ *testing.T) {
path := "queue-open-invalid-chunk-offset" path := "queue-open-invalid-chunk-offset"
mustCreateDir(path) mustCreateDir(path)
mustCreateEmptyMetainfo(path, "foobar") mustCreateEmptyMetainfo(path, "foobar")
@ -49,7 +49,7 @@ func TestQueueOpen(t *testing.T) {
q.MustClose() q.MustClose()
mustDeleteDir(path) mustDeleteDir(path)
}) })
t.Run("too-new-chunk", func(t *testing.T) { t.Run("too-new-chunk", func(_ *testing.T) {
path := "queue-open-too-new-chunk" path := "queue-open-too-new-chunk"
mustCreateDir(path) mustCreateDir(path)
mustCreateEmptyMetainfo(path, "foobar") mustCreateEmptyMetainfo(path, "foobar")
@ -88,7 +88,7 @@ func TestQueueOpen(t *testing.T) {
q.MustClose() q.MustClose()
mustDeleteDir(path) mustDeleteDir(path)
}) })
t.Run("metainfo-dir", func(t *testing.T) { t.Run("metainfo-dir", func(_ *testing.T) {
path := "queue-open-metainfo-dir" path := "queue-open-metainfo-dir"
mustCreateDir(path) mustCreateDir(path)
mustCreateDir(filepath.Join(path, metainfoFilename)) mustCreateDir(filepath.Join(path, metainfoFilename))
@ -112,7 +112,7 @@ func TestQueueOpen(t *testing.T) {
q.MustClose() q.MustClose()
mustDeleteDir(path) mustDeleteDir(path)
}) })
t.Run("invalid-writer-file-size", func(t *testing.T) { t.Run("invalid-writer-file-size", func(_ *testing.T) {
path := "too-small-reader-file" path := "too-small-reader-file"
mustCreateDir(path) mustCreateDir(path)
mustCreateEmptyMetainfo(path, "foobar") mustCreateEmptyMetainfo(path, "foobar")
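
These subtests can blank their *testing.T because the mustCreate* helpers panic on failure rather than reporting through t. A rough sketch of that helper style (helper bodies here are illustrative, not the queue package's actual code):

package example

import (
	"fmt"
	"os"
	"testing"
)

// mustCreateDir panics on failure instead of taking a *testing.T,
// which keeps call sites short but leaves subtest closures with an
// unused testing parameter.
func mustCreateDir(path string) {
	if err := os.MkdirAll(path, 0o755); err != nil {
		panic(fmt.Errorf("cannot create dir: %w", err))
	}
}

func TestOpen(t *testing.T) {
	t.Run("empty-dir", func(_ *testing.T) {
		path := "test-open-empty-dir"
		mustCreateDir(path)
		defer os.RemoveAll(path)
	})
}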

View file

@ -101,12 +101,12 @@ func TestLoadRelabelConfigsFailure(t *testing.T) {
t.Fatalf("unexpected non-empty rcs: %#v", rcs) t.Fatalf("unexpected non-empty rcs: %#v", rcs)
} }
} }
t.Run("non-existing-file", func(t *testing.T) {
f("testdata/non-exsiting-file") // non-existing-file
}) f("testdata/non-exsiting-file")
t.Run("invalid-file", func(t *testing.T) {
f("testdata/invalid_config.yml") // invalid-file
}) f("testdata/invalid_config.yml")
} }
func TestParsedConfigsString(t *testing.T) { func TestParsedConfigsString(t *testing.T) {
@ -209,410 +209,410 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
t.Fatalf("unexpected non-empty pcs: %#v", pcs) t.Fatalf("unexpected non-empty pcs: %#v", pcs)
} }
} }
t.Run("invalid-regex", func(t *testing.T) {
f([]RelabelConfig{ // invalid regex
{ f([]RelabelConfig{
SourceLabels: []string{"aaa"}, {
TargetLabel: "xxx", SourceLabels: []string{"aaa"},
Regex: &MultiLineRegex{ TargetLabel: "xxx",
S: "foo[bar", Regex: &MultiLineRegex{
}, S: "foo[bar",
}, },
}) },
}) })
t.Run("replace-missing-target-label", func(t *testing.T) {
f([]RelabelConfig{ // replace-missing-target-label
{ f([]RelabelConfig{
Action: "replace", {
SourceLabels: []string{"foo"}, Action: "replace",
SourceLabels: []string{"foo"},
},
})
// replace_all-missing-source-labels
f([]RelabelConfig{
{
Action: "replace_all",
TargetLabel: "xxx",
},
})
// replace_all-missing-target-label
f([]RelabelConfig{
{
Action: "replace_all",
SourceLabels: []string{"foo"},
},
})
// keep-missing-source-labels
f([]RelabelConfig{
{
Action: "keep",
},
})
// keep_if_contains-missing-target-label
f([]RelabelConfig{
{
Action: "keep_if_contains",
SourceLabels: []string{"foo"},
},
})
// keep_if_contains-missing-source-labels
f([]RelabelConfig{
{
Action: "keep_if_contains",
TargetLabel: "foo",
},
})
// keep_if_contains-unused-regex
f([]RelabelConfig{
{
Action: "keep_if_contains",
TargetLabel: "foo",
SourceLabels: []string{"bar"},
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("replace_all-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{ // drop_if_contains-missing-target-label
{ f([]RelabelConfig{
Action: "replace_all", {
TargetLabel: "xxx", Action: "drop_if_contains",
SourceLabels: []string{"foo"},
},
})
// drop_if_contains-missing-source-labels
f([]RelabelConfig{
{
Action: "drop_if_contains",
TargetLabel: "foo",
},
})
// drop_if_contains-unused-regex
f([]RelabelConfig{
{
Action: "drop_if_contains",
TargetLabel: "foo",
SourceLabels: []string{"bar"},
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("replace_all-missing-target-label", func(t *testing.T) {
f([]RelabelConfig{ // keep_if_equal-missing-source-labels
{ f([]RelabelConfig{
Action: "replace_all", {
SourceLabels: []string{"foo"}, Action: "keep_if_equal",
},
})
// keep_if_equal-single-source-label
f([]RelabelConfig{
{
Action: "keep_if_equal",
SourceLabels: []string{"foo"},
},
})
// keep_if_equal-unused-target-label
f([]RelabelConfig{
{
Action: "keep_if_equal",
SourceLabels: []string{"foo", "bar"},
TargetLabel: "foo",
},
})
// keep_if_equal-unused-regex
f([]RelabelConfig{
{
Action: "keep_if_equal",
SourceLabels: []string{"foo", "bar"},
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("keep-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{ // drop_if_equal-missing-source-labels
{ f([]RelabelConfig{
Action: "keep", {
Action: "drop_if_equal",
},
})
// drop_if_equal-single-source-label
f([]RelabelConfig{
{
Action: "drop_if_equal",
SourceLabels: []string{"foo"},
},
})
// drop_if_equal-unused-target-label
f([]RelabelConfig{
{
Action: "drop_if_equal",
SourceLabels: []string{"foo", "bar"},
TargetLabel: "foo",
},
})
// drop_if_equal-unused-regex
f([]RelabelConfig{
{
Action: "drop_if_equal",
SourceLabels: []string{"foo", "bar"},
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("keep_if_contains-missing-target-label", func(t *testing.T) {
f([]RelabelConfig{ // keepequal-missing-source-labels
{ f([]RelabelConfig{
Action: "keep_if_contains", {
SourceLabels: []string{"foo"}, Action: "keepequal",
},
})
// keepequal-missing-target-label
f([]RelabelConfig{
{
Action: "keepequal",
SourceLabels: []string{"foo"},
},
})
// keepequal-unused-regex
f([]RelabelConfig{
{
Action: "keepequal",
SourceLabels: []string{"foo"},
TargetLabel: "foo",
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("keep_if_contains-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{ // dropequal-missing-source-labels
{ f([]RelabelConfig{
Action: "keep_if_contains", {
TargetLabel: "foo", Action: "dropequal",
},
})
// dropequal-missing-target-label
f([]RelabelConfig{
{
Action: "dropequal",
SourceLabels: []string{"foo"},
},
})
// dropequal-unused-regex
f([]RelabelConfig{
{
Action: "dropequal",
SourceLabels: []string{"foo"},
TargetLabel: "foo",
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("keep_if_contains-unused-regex", func(t *testing.T) {
f([]RelabelConfig{ // drop-missing-source-labels
{ f([]RelabelConfig{
Action: "keep_if_contains", {
TargetLabel: "foo", Action: "drop",
SourceLabels: []string{"bar"}, },
Regex: &MultiLineRegex{ })
S: "bar",
}, // hashmod-missing-source-labels
f([]RelabelConfig{
{
Action: "hashmod",
TargetLabel: "aaa",
Modulus: 123,
},
})
// hashmod-missing-target-label
f([]RelabelConfig{
{
Action: "hashmod",
SourceLabels: []string{"aaa"},
Modulus: 123,
},
})
// hashmod-missing-modulus
f([]RelabelConfig{
{
Action: "hashmod",
SourceLabels: []string{"aaa"},
TargetLabel: "xxx",
},
})
// invalid-action
f([]RelabelConfig{
{
Action: "invalid-action",
},
})
// drop_metrics-missing-regex
f([]RelabelConfig{
{
Action: "drop_metrics",
},
})
// drop_metrics-non-empty-source-labels
f([]RelabelConfig{
{
Action: "drop_metrics",
SourceLabels: []string{"foo"},
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("drop_if_contains-missing-target-label", func(t *testing.T) {
f([]RelabelConfig{ // keep_metrics-missing-regex
{ f([]RelabelConfig{
Action: "drop_if_contains", {
SourceLabels: []string{"foo"}, Action: "keep_metrics",
},
})
// keep_metrics-non-empty-source-labels
f([]RelabelConfig{
{
Action: "keep_metrics",
SourceLabels: []string{"foo"},
Regex: &MultiLineRegex{
S: "bar",
}, },
}) },
}) })
t.Run("drop_if_contains-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{ // uppercase-missing-sourceLabels
{ f([]RelabelConfig{
Action: "drop_if_contains", {
TargetLabel: "foo", Action: "uppercase",
TargetLabel: "foobar",
},
})
// lowercase-missing-targetLabel
f([]RelabelConfig{
{
Action: "lowercase",
SourceLabels: []string{"foobar"},
},
})
// graphite-missing-match
f([]RelabelConfig{
{
Action: "graphite",
Labels: map[string]string{
"foo": "bar",
}, },
}) },
}) })
t.Run("drop_if_contains-unused-regex", func(t *testing.T) {
f([]RelabelConfig{ // graphite-missing-labels
{ f([]RelabelConfig{
Action: "drop_if_contains", {
TargetLabel: "foo", Action: "graphite",
SourceLabels: []string{"bar"}, Match: "foo.*.bar",
Regex: &MultiLineRegex{ },
S: "bar", })
},
// graphite-superflouous-sourceLabels
f([]RelabelConfig{
{
Action: "graphite",
Match: "foo.*.bar",
Labels: map[string]string{
"foo": "bar",
}, },
}) SourceLabels: []string{"foo"},
},
}) })
t.Run("keep_if_equal-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{ // graphite-superflouous-targetLabel
{ f([]RelabelConfig{
Action: "keep_if_equal", {
Action: "graphite",
Match: "foo.*.bar",
Labels: map[string]string{
"foo": "bar",
}, },
}) TargetLabel: "foo",
}) },
t.Run("keep_if_equal-single-source-label", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keep_if_equal",
SourceLabels: []string{"foo"},
},
})
})
t.Run("keep_if_equal-unused-target-label", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keep_if_equal",
SourceLabels: []string{"foo", "bar"},
TargetLabel: "foo",
},
})
})
t.Run("keep_if_equal-unused-regex", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keep_if_equal",
SourceLabels: []string{"foo", "bar"},
Regex: &MultiLineRegex{
S: "bar",
},
},
})
})
t.Run("drop_if_equal-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "drop_if_equal",
},
})
})
t.Run("drop_if_equal-single-source-label", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "drop_if_equal",
SourceLabels: []string{"foo"},
},
})
})
t.Run("drop_if_equal-unused-target-label", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "drop_if_equal",
SourceLabels: []string{"foo", "bar"},
TargetLabel: "foo",
},
})
})
t.Run("drop_if_equal-unused-regex", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "drop_if_equal",
SourceLabels: []string{"foo", "bar"},
Regex: &MultiLineRegex{
S: "bar",
},
},
})
})
t.Run("keepequal-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keepequal",
},
})
})
t.Run("keepequal-missing-target-label", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keepequal",
SourceLabels: []string{"foo"},
},
})
})
t.Run("keepequal-unused-regex", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keepequal",
SourceLabels: []string{"foo"},
TargetLabel: "foo",
Regex: &MultiLineRegex{
S: "bar",
},
},
})
})
t.Run("dropequal-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "dropequal",
},
})
})
t.Run("dropequal-missing-target-label", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "dropequal",
SourceLabels: []string{"foo"},
},
})
})
t.Run("dropequal-unused-regex", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "dropequal",
SourceLabels: []string{"foo"},
TargetLabel: "foo",
Regex: &MultiLineRegex{
S: "bar",
},
},
})
})
t.Run("drop-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "drop",
},
})
})
t.Run("hashmod-missing-source-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "hashmod",
TargetLabel: "aaa",
Modulus: 123,
},
})
})
t.Run("hashmod-missing-target-label", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "hashmod",
SourceLabels: []string{"aaa"},
Modulus: 123,
},
})
})
t.Run("hashmod-missing-modulus", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "hashmod",
SourceLabels: []string{"aaa"},
TargetLabel: "xxx",
},
})
})
t.Run("invalid-action", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "invalid-action",
},
})
})
t.Run("drop_metrics-missing-regex", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "drop_metrics",
},
})
})
t.Run("drop_metrics-non-empty-source-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "drop_metrics",
SourceLabels: []string{"foo"},
Regex: &MultiLineRegex{
S: "bar",
},
},
})
})
t.Run("keep_metrics-missing-regex", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keep_metrics",
},
})
})
t.Run("keep_metrics-non-empty-source-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "keep_metrics",
SourceLabels: []string{"foo"},
Regex: &MultiLineRegex{
S: "bar",
},
},
})
})
t.Run("uppercase-missing-sourceLabels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "uppercase",
TargetLabel: "foobar",
},
})
})
t.Run("lowercase-missing-targetLabel", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "lowercase",
SourceLabels: []string{"foobar"},
},
})
})
t.Run("graphite-missing-match", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "graphite",
Labels: map[string]string{
"foo": "bar",
},
},
})
})
t.Run("graphite-missing-labels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "graphite",
Match: "foo.*.bar",
},
})
})
t.Run("graphite-superflouous-sourceLabels", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "graphite",
Match: "foo.*.bar",
Labels: map[string]string{
"foo": "bar",
},
SourceLabels: []string{"foo"},
},
})
})
t.Run("graphite-superflouous-targetLabel", func(t *testing.T) {
f([]RelabelConfig{
{
Action: "graphite",
Match: "foo.*.bar",
Labels: map[string]string{
"foo": "bar",
},
TargetLabel: "foo",
},
})
}) })
// graphite-superflouous-replacement
replacement := "foo" replacement := "foo"
t.Run("graphite-superflouous-replacement", func(t *testing.T) { f([]RelabelConfig{
f([]RelabelConfig{ {
{ Action: "graphite",
Action: "graphite", Match: "foo.*.bar",
Match: "foo.*.bar", Labels: map[string]string{
Labels: map[string]string{ "foo": "bar",
"foo": "bar",
},
Replacement: &replacement,
}, },
}) Replacement: &replacement,
},
}) })
// graphite-superflouous-regex
var re MultiLineRegex var re MultiLineRegex
t.Run("graphite-superflouous-regex", func(t *testing.T) { f([]RelabelConfig{
f([]RelabelConfig{ {
{ Action: "graphite",
Action: "graphite", Match: "foo.*.bar",
Match: "foo.*.bar", Labels: map[string]string{
Labels: map[string]string{ "foo": "bar",
"foo": "bar",
},
Regex: &re,
}, },
}) Regex: &re,
},
}) })
t.Run("non-graphite-superflouos-match", func(t *testing.T) {
f([]RelabelConfig{ // non-graphite-superflouos-match
{ f([]RelabelConfig{
Action: "uppercase", {
SourceLabels: []string{"foo"}, Action: "uppercase",
TargetLabel: "foo", SourceLabels: []string{"foo"},
Match: "aaa", TargetLabel: "foo",
}, Match: "aaa",
}) },
}) })
t.Run("non-graphite-superflouos-labels", func(t *testing.T) {
f([]RelabelConfig{ // non-graphite-superflouos-labels
{ f([]RelabelConfig{
Action: "uppercase", {
SourceLabels: []string{"foo"}, Action: "uppercase",
TargetLabel: "foo", SourceLabels: []string{"foo"},
Labels: map[string]string{ TargetLabel: "foo",
"foo": "Bar", Labels: map[string]string{
}, "foo": "Bar",
}, },
}) },
}) })
} }
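
This hunk goes further than renaming: subtests whose closures never used any *testing.T are collapsed into comment-labeled calls to the shared f helper, which reports failures through the t it captured (note t.Helper()). A minimal sketch of the resulting shape, using regexp.Compile as a stand-in for the config parser:

package example

import (
	"regexp"
	"testing"
)

func TestCompileFailure(t *testing.T) {
	// f mirrors the shared assertion helper: it reports failures via
	// the captured t, so a per-case t.Run added an unused *testing.T.
	f := func(expr string) {
		t.Helper()
		if _, err := regexp.Compile(expr); err == nil {
			t.Fatalf("expecting non-nil error when compiling %q", expr)
		}
	}

	// invalid-regex
	f("foo[bar")

	// unclosed-group
	f("(abc")
}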

File diff suppressed because it is too large

View file

@ -54,7 +54,7 @@ func newClient(ctx context.Context, sw *ScrapeWork) (*client, error) {
setHeaders := func(req *http.Request) error { setHeaders := func(req *http.Request) error {
return sw.AuthConfig.SetHeaders(req, true) return sw.AuthConfig.SetHeaders(req, true)
} }
setProxyHeaders := func(req *http.Request) error { setProxyHeaders := func(_ *http.Request) error {
return nil return nil
} }
proxyURL := sw.ProxyURL proxyURL := sw.ProxyURL
@ -90,7 +90,7 @@ func newClient(ctx context.Context, sw *ScrapeWork) (*client, error) {
Timeout: sw.ScrapeTimeout, Timeout: sw.ScrapeTimeout,
} }
if sw.DenyRedirects { if sw.DenyRedirects {
hc.CheckRedirect = func(req *http.Request, via []*http.Request) error { hc.CheckRedirect = func(_ *http.Request, _ []*http.Request) error {
return http.ErrUseLastResponse return http.ErrUseLastResponse
} }
} }
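
For reference, CheckRedirect returning http.ErrUseLastResponse is the standard-library idiom behind DenyRedirects: the client stops following Location and hands back the most recent response with its body unclosed. Since the decision depends on neither argument, both are blanked. A minimal sketch:

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	hc := &http.Client{
		Timeout: 10 * time.Second,
		// Neither the upcoming request nor the via chain matters:
		// any redirect is cut short and the response returned as-is.
		CheckRedirect: func(_ *http.Request, _ []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}
	resp, err := hc.Get("https://example.com/")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer resp.Body.Close()
	// A 3xx status would be reported here instead of being followed.
	fmt.Println(resp.StatusCode)
}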

View file

@ -144,20 +144,20 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy
}, },
} }
setHTTPHeaders := func(req *http.Request) error { return nil } setHTTPHeaders := func(_ *http.Request) error { return nil }
if ac != nil { if ac != nil {
setHTTPHeaders = func(req *http.Request) error { setHTTPHeaders = func(req *http.Request) error {
return ac.SetHeaders(req, true) return ac.SetHeaders(req, true)
} }
} }
if httpCfg.FollowRedirects != nil && !*httpCfg.FollowRedirects { if httpCfg.FollowRedirects != nil && !*httpCfg.FollowRedirects {
checkRedirect := func(req *http.Request, via []*http.Request) error { checkRedirect := func(_ *http.Request, _ []*http.Request) error {
return http.ErrUseLastResponse return http.ErrUseLastResponse
} }
client.CheckRedirect = checkRedirect client.CheckRedirect = checkRedirect
blockingClient.CheckRedirect = checkRedirect blockingClient.CheckRedirect = checkRedirect
} }
setHTTPProxyHeaders := func(req *http.Request) error { return nil } setHTTPProxyHeaders := func(_ *http.Request) error { return nil }
if proxyAC != nil { if proxyAC != nil {
setHTTPProxyHeaders = func(req *http.Request) error { setHTTPProxyHeaders = func(req *http.Request) error {
return proxyURL.SetHeaders(proxyAC, req) return proxyURL.SetHeaders(proxyAC, req)

View file

@ -141,7 +141,7 @@ func runScraper(configFile string, pushData func(at *auth.Token, wr *prompbmarsh
scs.add("nomad_sd_configs", *nomad.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getNomadSDScrapeWork(swsPrev) }) scs.add("nomad_sd_configs", *nomad.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getNomadSDScrapeWork(swsPrev) })
scs.add("openstack_sd_configs", *openstack.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getOpenStackSDScrapeWork(swsPrev) }) scs.add("openstack_sd_configs", *openstack.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getOpenStackSDScrapeWork(swsPrev) })
scs.add("yandexcloud_sd_configs", *yandexcloud.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getYandexCloudSDScrapeWork(swsPrev) }) scs.add("yandexcloud_sd_configs", *yandexcloud.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getYandexCloudSDScrapeWork(swsPrev) })
scs.add("static_configs", 0, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getStaticScrapeWork() }) scs.add("static_configs", 0, func(cfg *Config, _ []*ScrapeWork) []*ScrapeWork { return cfg.getStaticScrapeWork() })
var tickerCh <-chan time.Time var tickerCh <-chan time.Time
if *configCheckInterval > 0 { if *configCheckInterval > 0 {

View file

@ -90,14 +90,14 @@ func TestScrapeWorkScrapeInternalFailure(t *testing.T) {
} }
readDataCalls := 0 readDataCalls := 0
sw.ReadData = func(dst *bytesutil.ByteBuffer) error { sw.ReadData = func(_ *bytesutil.ByteBuffer) error {
readDataCalls++ readDataCalls++
return fmt.Errorf("error when reading data") return fmt.Errorf("error when reading data")
} }
pushDataCalls := 0 pushDataCalls := 0
var pushDataErr error var pushDataErr error
sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) { sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
if err := expectEqualTimeseries(wr.Timeseries, timeseriesExpected); err != nil { if err := expectEqualTimeseries(wr.Timeseries, timeseriesExpected); err != nil {
pushDataErr = fmt.Errorf("unexpected data pushed: %w\ngot\n%#v\nwant\n%#v", err, wr.Timeseries, timeseriesExpected) pushDataErr = fmt.Errorf("unexpected data pushed: %w\ngot\n%#v\nwant\n%#v", err, wr.Timeseries, timeseriesExpected)
} }
@ -139,7 +139,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
pushDataCalls := 0 pushDataCalls := 0
var pushDataErr error var pushDataErr error
sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) { sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
pushDataCalls++ pushDataCalls++
if len(wr.Timeseries) > len(timeseriesExpected) { if len(wr.Timeseries) > len(timeseriesExpected) {
pushDataErr = fmt.Errorf("too many time series obtained; got %d; want %d\ngot\n%+v\nwant\n%+v", pushDataErr = fmt.Errorf("too many time series obtained; got %d; want %d\ngot\n%+v\nwant\n%+v",
@ -721,7 +721,7 @@ func TestSendStaleSeries(t *testing.T) {
defer common.StopUnmarshalWorkers() defer common.StopUnmarshalWorkers()
var staleMarks int var staleMarks int
sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) { sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
staleMarks += len(wr.Timeseries) staleMarks += len(wr.Timeseries)
} }
sw.sendStaleSeries(lastScrape, currScrape, 0, false) sw.sendStaleSeries(lastScrape, currScrape, 0, false)

View file

@ -84,7 +84,7 @@ vm_tcplistener_write_calls_total{name="https", addr=":443"} 132356
var sw scrapeWork var sw scrapeWork
sw.Config = &ScrapeWork{} sw.Config = &ScrapeWork{}
sw.ReadData = readDataFunc sw.ReadData = readDataFunc
sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) {} sw.PushData = func(_ *auth.Token, _ *prompbmarshal.WriteRequest) {}
tsmGlobal.Register(&sw) tsmGlobal.Register(&sw)
timestamp := int64(0) timestamp := int64(0)
for pb.Next() { for pb.Next() {

View file

@ -14,7 +14,7 @@ func TestParseFailure(t *testing.T) {
f := func(req string) { f := func(req string) {
t.Helper() t.Helper()
callback := func(rows []newrelic.Row) error { callback := func(_ []newrelic.Row) error {
panic(fmt.Errorf("unexpected call into callback")) panic(fmt.Errorf("unexpected call into callback"))
} }
r := bytes.NewReader([]byte(req)) r := bytes.NewReader([]byte(req))

View file

@ -24,7 +24,7 @@ func BenchmarkParseStream(b *testing.B) {
data := pbRequest.MarshalProtobuf(nil) data := pbRequest.MarshalProtobuf(nil)
for p.Next() { for p.Next() {
err := ParseStream(bytes.NewBuffer(data), false, nil, func(tss []prompbmarshal.TimeSeries) error { err := ParseStream(bytes.NewBuffer(data), false, nil, func(_ []prompbmarshal.TimeSeries) error {
return nil return nil
}) })
if err != nil { if err != nil {

View file

@ -644,7 +644,7 @@ const (
func getOptimizedReMatchFuncExt(reMatch func(b []byte) bool, sre *syntax.Regexp) (func(b []byte) bool, string, uint64) { func getOptimizedReMatchFuncExt(reMatch func(b []byte) bool, sre *syntax.Regexp) (func(b []byte) bool, string, uint64) {
if isDotStar(sre) { if isDotStar(sre) {
// '.*' // '.*'
return func(b []byte) bool { return func(_ []byte) bool {
return true return true
}, "", fullMatchCost }, "", fullMatchCost
} }
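
The hunk above is the '.*' fast path: a regexp that matches any input degenerates to a constant-true matcher, so the byte-slice parameter is irrelevant and gets blanked. A rough sketch of the idea (the fullMatchCost constant and the structure checks are simplified stand-ins for the real helpers):

package main

import (
	"fmt"
	"regexp/syntax"
)

const fullMatchCost = 1 // stand-in for the real cost constant

// optimizedMatcher returns a cheap matcher for trivial regexps,
// falling back to nil when no shortcut applies.
func optimizedMatcher(expr string) (func(b []byte) bool, uint64) {
	sre, err := syntax.Parse(expr, syntax.Perl)
	if err != nil {
		return nil, 0
	}
	sre = sre.Simplify()
	// ".*" parses to a star over any-char: it matches everything,
	// so the input bytes never need to be inspected.
	if sre.Op == syntax.OpStar && len(sre.Sub) == 1 &&
		(sre.Sub[0].Op == syntax.OpAnyChar || sre.Sub[0].Op == syntax.OpAnyCharNotNL) {
		return func(_ []byte) bool {
			return true
		}, fullMatchCost
	}
	return nil, 0
}

func main() {
	m, cost := optimizedMatcher(".*")
	fmt.Println(m([]byte("anything")), cost) // true 1
}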

View file

@ -784,7 +784,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
} }
} }
t.Run("plain-value", func(t *testing.T) { t.Run("plain-value", func(_ *testing.T) {
value := "xx" value := "xx"
isNegative := false isNegative := false
isRegexp := false isRegexp := false
@ -796,7 +796,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("foo") mismatch("foo")
mismatch("xx") mismatch("xx")
}) })
t.Run("negative-plain-value", func(t *testing.T) { t.Run("negative-plain-value", func(_ *testing.T) {
value := "xx" value := "xx"
isNegative := true isNegative := true
isRegexp := false isRegexp := false
@ -811,7 +811,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
match("xxx") match("xxx")
match("xxfoo") match("xxfoo")
}) })
t.Run("regexp-convert-to-plain-value", func(t *testing.T) { t.Run("regexp-convert-to-plain-value", func(_ *testing.T) {
value := "http" value := "http"
isNegative := false isNegative := false
isRegexp := true isRegexp := true
@ -824,7 +824,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("http") mismatch("http")
mismatch("foobar") mismatch("foobar")
}) })
t.Run("negative-regexp-convert-to-plain-value", func(t *testing.T) { t.Run("negative-regexp-convert-to-plain-value", func(_ *testing.T) {
value := "http" value := "http"
isNegative := true isNegative := true
isRegexp := true isRegexp := true
@ -839,7 +839,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
match("httpx") match("httpx")
match("foobar") match("foobar")
}) })
t.Run("regexp-prefix-any-suffix", func(t *testing.T) { t.Run("regexp-prefix-any-suffix", func(_ *testing.T) {
value := "http.*" value := "http.*"
isNegative := false isNegative := false
isRegexp := true isRegexp := true
@ -852,7 +852,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
match("http") match("http")
match("foobar") match("foobar")
}) })
t.Run("negative-regexp-prefix-any-suffix", func(t *testing.T) { t.Run("negative-regexp-prefix-any-suffix", func(_ *testing.T) {
value := "http.*" value := "http.*"
isNegative := true isNegative := true
isRegexp := true isRegexp := true
@ -867,7 +867,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("httpsdf") mismatch("httpsdf")
mismatch("foobar") mismatch("foobar")
}) })
t.Run("regexp-prefix-contains-suffix", func(t *testing.T) { t.Run("regexp-prefix-contains-suffix", func(_ *testing.T) {
value := "http.*foo.*" value := "http.*foo.*"
isNegative := false isNegative := false
isRegexp := true isRegexp := true
@ -883,7 +883,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
match("xfoobar") match("xfoobar")
match("xfoo") match("xfoo")
}) })
t.Run("negative-regexp-prefix-contains-suffix", func(t *testing.T) { t.Run("negative-regexp-prefix-contains-suffix", func(_ *testing.T) {
value := "http.*foo.*" value := "http.*foo.*"
isNegative := true isNegative := true
isRegexp := true isRegexp := true
@ -903,7 +903,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("httpxfoobar") mismatch("httpxfoobar")
mismatch("httpxfoo") mismatch("httpxfoo")
}) })
t.Run("negative-regexp-noprefix-contains-suffix", func(t *testing.T) { t.Run("negative-regexp-noprefix-contains-suffix", func(_ *testing.T) {
value := ".*foo.*" value := ".*foo.*"
isNegative := true isNegative := true
isRegexp := true isRegexp := true
@ -919,7 +919,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("xfoobar") mismatch("xfoobar")
mismatch("xfoo") mismatch("xfoo")
}) })
t.Run("regexp-prefix-special-suffix", func(t *testing.T) { t.Run("regexp-prefix-special-suffix", func(_ *testing.T) {
value := "http.*bar" value := "http.*bar"
isNegative := false isNegative := false
isRegexp := true isRegexp := true
@ -934,7 +934,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
match("foobar") match("foobar")
mismatch("foobarx") mismatch("foobarx")
}) })
t.Run("negative-regexp-prefix-special-suffix", func(t *testing.T) { t.Run("negative-regexp-prefix-special-suffix", func(_ *testing.T) {
value := "http.*bar" value := "http.*bar"
isNegative := true isNegative := true
isRegexp := true isRegexp := true
@ -951,7 +951,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
match("httpxybarx") match("httpxybarx")
mismatch("ahttpxybar") mismatch("ahttpxybar")
}) })
t.Run("negative-regexp-noprefix-special-suffix", func(t *testing.T) { t.Run("negative-regexp-noprefix-special-suffix", func(_ *testing.T) {
value := ".*bar" value := ".*bar"
isNegative := true isNegative := true
isRegexp := true isRegexp := true
@ -1002,7 +1002,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("bar") mismatch("bar")
match("xhttpbar") match("xhttpbar")
}) })
t.Run("regexp-iflag-no-suffix", func(t *testing.T) { t.Run("regexp-iflag-no-suffix", func(_ *testing.T) {
value := "(?i)http" value := "(?i)http"
isNegative := false isNegative := false
isRegexp := true isRegexp := true
@ -1020,7 +1020,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("xhttp://") mismatch("xhttp://")
mismatch("hTTp://foobar.com") mismatch("hTTp://foobar.com")
}) })
t.Run("negative-regexp-iflag-no-suffix", func(t *testing.T) { t.Run("negative-regexp-iflag-no-suffix", func(_ *testing.T) {
value := "(?i)http" value := "(?i)http"
isNegative := true isNegative := true
isRegexp := true isRegexp := true
@ -1038,7 +1038,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
match("xhttp://") match("xhttp://")
match("hTTp://foobar.com") match("hTTp://foobar.com")
}) })
t.Run("regexp-iflag-any-suffix", func(t *testing.T) { t.Run("regexp-iflag-any-suffix", func(_ *testing.T) {
value := "(?i)http.*" value := "(?i)http.*"
isNegative := false isNegative := false
isRegexp := true isRegexp := true
@ -1055,7 +1055,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
mismatch("xhttp") mismatch("xhttp")
mismatch("xhttp://") mismatch("xhttp://")
}) })
t.Run("negative-regexp-iflag-any-suffix", func(t *testing.T) { t.Run("negative-regexp-iflag-any-suffix", func(_ *testing.T) {
value := "(?i)http.*" value := "(?i)http.*"
isNegative := true isNegative := true
isRegexp := true isRegexp := true

View file

@ -411,7 +411,7 @@ func BenchmarkOptimizedReMatchCost(b *testing.B) {
}) })
}) })
b.Run(".*", func(b *testing.B) { b.Run(".*", func(b *testing.B) {
reMatch := func(b []byte) bool { reMatch := func(_ []byte) bool {
return true return true
} }
suffix := []byte("foo1.bar.baz.sss.ddd") suffix := []byte("foo1.bar.baz.sss.ddd")

View file

@ -8,7 +8,7 @@ import (
) )
func BenchmarkDeduplicatorPush(b *testing.B) { func BenchmarkDeduplicatorPush(b *testing.B) {
pushFunc := func(tss []prompbmarshal.TimeSeries) {} pushFunc := func(_ []prompbmarshal.TimeSeries) {}
d := NewDeduplicator(pushFunc, time.Hour, nil) d := NewDeduplicator(pushFunc, time.Hour, nil)
b.ReportAllocs() b.ReportAllocs()

View file

@ -17,7 +17,7 @@ import (
func TestAggregatorsFailure(t *testing.T) { func TestAggregatorsFailure(t *testing.T) {
f := func(config string) { f := func(config string) {
t.Helper() t.Helper()
pushFunc := func(tss []prompbmarshal.TimeSeries) { pushFunc := func(_ []prompbmarshal.TimeSeries) {
panic(fmt.Errorf("pushFunc shouldn't be called")) panic(fmt.Errorf("pushFunc shouldn't be called"))
} }
a, err := newAggregatorsFromData([]byte(config), pushFunc, nil) a, err := newAggregatorsFromData([]byte(config), pushFunc, nil)
@ -157,7 +157,7 @@ func TestAggregatorsEqual(t *testing.T) {
f := func(a, b string, expectedResult bool) { f := func(a, b string, expectedResult bool) {
t.Helper() t.Helper()
pushFunc := func(tss []prompbmarshal.TimeSeries) {} pushFunc := func(_ []prompbmarshal.TimeSeries) {}
aa, err := newAggregatorsFromData([]byte(a), pushFunc, nil) aa, err := newAggregatorsFromData([]byte(a), pushFunc, nil)
if err != nil { if err != nil {
t.Fatalf("cannot initialize aggregators: %s", err) t.Fatalf("cannot initialize aggregators: %s", err)

View file

@ -43,7 +43,7 @@ func BenchmarkAggregatorsFlushSerial(b *testing.B) {
"max", "avg", "increase", "count_series", "max", "avg", "increase", "count_series",
"last", "stddev", "stdvar", "total_prometheus", "increase_prometheus", "last", "stddev", "stdvar", "total_prometheus", "increase_prometheus",
} }
pushFunc := func(tss []prompbmarshal.TimeSeries) {} pushFunc := func(_ []prompbmarshal.TimeSeries) {}
a := newBenchAggregators(outputs, pushFunc) a := newBenchAggregators(outputs, pushFunc)
defer a.MustStop() defer a.MustStop()
_ = a.Push(benchSeries, nil) _ = a.Push(benchSeries, nil)
@ -59,7 +59,7 @@ func BenchmarkAggregatorsFlushSerial(b *testing.B) {
} }
func benchmarkAggregatorsPush(b *testing.B, output string) { func benchmarkAggregatorsPush(b *testing.B, output string) {
pushFunc := func(tss []prompbmarshal.TimeSeries) {} pushFunc := func(_ []prompbmarshal.TimeSeries) {}
a := newBenchAggregators([]string{output}, pushFunc) a := newBenchAggregators([]string{output}, pushFunc)
defer a.MustStop() defer a.MustStop()

View file

@ -403,7 +403,7 @@ func testSetBasicOps(t *testing.T, itemsCount int) {
// Verify fast stop // Verify fast stop
calls := 0 calls := 0
s.ForEach(func(part []uint64) bool { s.ForEach(func(_ []uint64) bool {
calls++ calls++
return false return false
}) })
@ -413,7 +413,7 @@ func testSetBasicOps(t *testing.T, itemsCount int) {
// Verify ForEach on nil set. // Verify ForEach on nil set.
var s1 *Set var s1 *Set
s1.ForEach(func(part []uint64) bool { s1.ForEach(func(_ []uint64) bool {
t.Fatalf("callback shouldn't be called on empty set") t.Fatalf("callback shouldn't be called on empty set")
return true return true
}) })
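
The boolean returned by the ForEach callback is an early-termination contract: returning false stops iteration after the current part, which is why exactly one call is expected in the fast-stop check above. A minimal sketch of the same contract over a plain slice of parts:

package main

import "fmt"

// forEach visits every part until the callback returns false.
func forEach(parts [][]uint64, f func(part []uint64) bool) {
	for _, p := range parts {
		if !f(p) {
			return
		}
	}
}

func main() {
	parts := [][]uint64{{1, 2}, {3}, {4, 5}}
	calls := 0
	forEach(parts, func(_ []uint64) bool {
		calls++
		return false // fast stop: visit at most one part
	})
	fmt.Println(calls) // 1
}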