From a1e0714c385c3c1882a1783fb5862aadc63ff6c1 Mon Sep 17 00:00:00 2001
From: Aliaksandr Valialkin <valyala@victoriametrics.com>
Date: Fri, 7 Jun 2024 18:28:34 +0200
Subject: [PATCH] lib,app: make golangci-lint happy
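
Rename unused function parameters to the blank identifier `_` so that the
unused-parameter style check reported by golangci-lint passes. Only parameter
names change; behavior is unaffected. A minimal sketch of the pattern applied
throughout this patch (illustrative only; it assumes an unused-parameter rule
such as revive's is enabled in the lint config):

	package main

	import "fmt"

	func main() {
		// Before: parameters are named but never used inside the closure,
		// which trips the unused-parameter lint check:
		//   callback := func(timestamp int64, msg string) { fmt.Println("got a message") }
		//
		// After: unused parameters are replaced with the blank identifier.
		callback := func(_ int64, _ string) { fmt.Println("got a message") }
		callback(0, "example")
	}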

---
 app/victoria-metrics/main_test.go             |   2 +-
 .../elasticsearch_timing_test.go              |   2 +-
 app/vlinsert/loki/loki_json_test.go           |   2 +-
 app/vlinsert/loki/loki_json_timing_test.go    |   2 +-
 .../loki/loki_protobuf_timing_test.go         |   2 +-
 app/vmagent/remotewrite/remotewrite.go        |   2 +-
 app/vmalert/alerting.go                       |   2 +-
 app/vmalert/datasource/vm_test.go             |   2 +-
 app/vmalert/main.go                           |   2 +-
 app/vmalert/notifier/alert_test.go            |   2 +-
 app/vmalert/templates/template.go             |   2 +-
 app/vmalert/web_test.go                       |  10 +-
 app/vmctl/main.go                             |   2 +-
 app/vminsert/common/streamaggr.go             |   2 +-
 app/vminsert/main.go                          |   2 +-
 app/vmselect/graphite/eval.go                 |   2 +-
 app/vmselect/graphite/transform.go            |   4 +-
 app/vmselect/prometheus/prometheus.go         |   4 +-
 app/vmselect/promql/aggr.go                   |  22 +--
 app/vmselect/promql/binary_op.go              |   2 +-
 app/vmselect/promql/exec_test.go              |   4 +-
 app/vmselect/promql/rollup.go                 |   6 +-
 .../promql/rollup_result_cache_test.go        |   4 +-
 app/vmselect/promql/transform.go              |   2 +-
 lib/httpserver/httpserver.go                  |   4 +-
 lib/logstorage/filters_test.go                |   6 +-
 lib/logstorage/indexdb_test.go                |  42 +++---
 lib/logstorage/storage_search.go              |   2 +-
 lib/logstorage/storage_search_test.go         |  52 +++----
 lib/netutil/tls.go                            |   2 +-
 lib/persistentqueue/persistentqueue_test.go   |  12 +-
 lib/promrelabel/config_test.go                |  80 +++++-----
 lib/promrelabel/relabel_test.go               | 140 +++++++++---------
 lib/promscrape/client.go                      |   6 +-
 lib/promscrape/discoveryutils/client.go       |   6 +-
 lib/promscrape/scraper.go                     |   4 +-
 lib/promscrape/scrapework_test.go             |   6 +-
 lib/promscrape/scrapework_timing_test.go      |   2 +-
 .../stream/streamparser_timing_test.go        |   2 +-
 lib/storage/tag_filters.go                    |   2 +-
 lib/storage/tag_filters_test.go               |  32 ++--
 lib/storage/tag_filters_timing_test.go        |   2 +-
 lib/streamaggr/streamaggr_test.go             |   4 +-
 lib/streamaggr/streamaggr_timing_test.go      |   2 +-
 lib/uint64set/uint64set_test.go               |   4 +-
 45 files changed, 250 insertions(+), 250 deletions(-)

diff --git a/app/victoria-metrics/main_test.go b/app/victoria-metrics/main_test.go
index 3ca10ebe3e..41f939e5b3 100644
--- a/app/victoria-metrics/main_test.go
+++ b/app/victoria-metrics/main_test.go
@@ -299,7 +299,7 @@ func readIn(readFor string, t *testing.T, insertTime time.Time) []test {
 	t.Helper()
 	s := newSuite(t)
 	var tt []test
-	s.noError(filepath.Walk(filepath.Join(testFixturesDir, readFor), func(path string, info os.FileInfo, err error) error {
+	s.noError(filepath.Walk(filepath.Join(testFixturesDir, readFor), func(path string, _ os.FileInfo, err error) error {
 		if err != nil {
 			return err
 		}
diff --git a/app/vlinsert/elasticsearch/elasticsearch_timing_test.go b/app/vlinsert/elasticsearch/elasticsearch_timing_test.go
index 9a50fe0ebe..1437039918 100644
--- a/app/vlinsert/elasticsearch/elasticsearch_timing_test.go
+++ b/app/vlinsert/elasticsearch/elasticsearch_timing_test.go
@@ -33,7 +33,7 @@ func benchmarkReadBulkRequest(b *testing.B, isGzip bool) {
 
 	timeField := "@timestamp"
 	msgField := "message"
-	processLogMessage := func(timestmap int64, fields []logstorage.Field) {}
+	processLogMessage := func(_ int64, _ []logstorage.Field) {}
 
 	b.ReportAllocs()
 	b.SetBytes(int64(len(data)))
diff --git a/app/vlinsert/loki/loki_json_test.go b/app/vlinsert/loki/loki_json_test.go
index 93cf8652ad..6eaa88d362 100644
--- a/app/vlinsert/loki/loki_json_test.go
+++ b/app/vlinsert/loki/loki_json_test.go
@@ -11,7 +11,7 @@ import (
 func TestParseJSONRequestFailure(t *testing.T) {
 	f := func(s string) {
 		t.Helper()
-		n, err := parseJSONRequest([]byte(s), func(timestamp int64, fields []logstorage.Field) {
+		n, err := parseJSONRequest([]byte(s), func(_ int64, _ []logstorage.Field) {
 			t.Fatalf("unexpected call to parseJSONRequest callback!")
 		})
 		if err == nil {
diff --git a/app/vlinsert/loki/loki_json_timing_test.go b/app/vlinsert/loki/loki_json_timing_test.go
index 9c51f593a1..9f9f313137 100644
--- a/app/vlinsert/loki/loki_json_timing_test.go
+++ b/app/vlinsert/loki/loki_json_timing_test.go
@@ -27,7 +27,7 @@ func benchmarkParseJSONRequest(b *testing.B, streams, rows, labels int) {
 	b.RunParallel(func(pb *testing.PB) {
 		data := getJSONBody(streams, rows, labels)
 		for pb.Next() {
-			_, err := parseJSONRequest(data, func(timestamp int64, fields []logstorage.Field) {})
+			_, err := parseJSONRequest(data, func(_ int64, _ []logstorage.Field) {})
 			if err != nil {
 				panic(fmt.Errorf("unexpected error: %s", err))
 			}
diff --git a/app/vlinsert/loki/loki_protobuf_timing_test.go b/app/vlinsert/loki/loki_protobuf_timing_test.go
index 18f5b89ef6..4d5f50ba0a 100644
--- a/app/vlinsert/loki/loki_protobuf_timing_test.go
+++ b/app/vlinsert/loki/loki_protobuf_timing_test.go
@@ -28,7 +28,7 @@ func benchmarkParseProtobufRequest(b *testing.B, streams, rows, labels int) {
 	b.RunParallel(func(pb *testing.PB) {
 		body := getProtobufBody(streams, rows, labels)
 		for pb.Next() {
-			_, err := parseProtobufRequest(body, func(timestamp int64, fields []logstorage.Field) {})
+			_, err := parseProtobufRequest(body, func(_ int64, _ []logstorage.Field) {})
 			if err != nil {
 				panic(fmt.Errorf("unexpected error: %s", err))
 			}
diff --git a/app/vmagent/remotewrite/remotewrite.go b/app/vmagent/remotewrite/remotewrite.go
index 70340b6888..f130920d6d 100644
--- a/app/vmagent/remotewrite/remotewrite.go
+++ b/app/vmagent/remotewrite/remotewrite.go
@@ -798,7 +798,7 @@ func getRowsCount(tss []prompbmarshal.TimeSeries) int {
 
 // CheckStreamAggrConfigs checks configs pointed by -remoteWrite.streamAggr.config
 func CheckStreamAggrConfigs() error {
-	pushNoop := func(tss []prompbmarshal.TimeSeries) {}
+	pushNoop := func(_ []prompbmarshal.TimeSeries) {}
 	for idx, sasFile := range *streamAggrConfig {
 		if sasFile == "" {
 			continue
diff --git a/app/vmalert/alerting.go b/app/vmalert/alerting.go
index 14ade0cd18..37f70319e9 100644
--- a/app/vmalert/alerting.go
+++ b/app/vmalert/alerting.go
@@ -258,7 +258,7 @@ func (ar *AlertingRule) ExecRange(ctx context.Context, start, end time.Time) ([]
 		return nil, err
 	}
 	var result []prompbmarshal.TimeSeries
-	qFn := func(query string) ([]datasource.Metric, error) {
+	qFn := func(_ string) ([]datasource.Metric, error) {
 		return nil, fmt.Errorf("`query` template isn't supported in replay mode")
 	}
 	for _, s := range res.Data {
diff --git a/app/vmalert/datasource/vm_test.go b/app/vmalert/datasource/vm_test.go
index e9a410039e..6b3ae2d3c2 100644
--- a/app/vmalert/datasource/vm_test.go
+++ b/app/vmalert/datasource/vm_test.go
@@ -71,7 +71,7 @@ func TestVMInstantQuery(t *testing.T) {
 			w.Write([]byte(`{"status":"success","data":{"resultType":"scalar","result":[1583786142, "1"]},"stats":{"seriesFetched": "42"}}`))
 		}
 	})
-	mux.HandleFunc("/render", func(w http.ResponseWriter, request *http.Request) {
+	mux.HandleFunc("/render", func(w http.ResponseWriter, _ *http.Request) {
 		c++
 		switch c {
 		case 8:
diff --git a/app/vmalert/main.go b/app/vmalert/main.go
index b47c58f6cb..a49c6b10bb 100644
--- a/app/vmalert/main.go
+++ b/app/vmalert/main.go
@@ -300,7 +300,7 @@ func getAlertURLGenerator(externalURL *url.URL, externalAlertSource string, vali
 		"tpl": externalAlertSource,
 	}
 	return func(alert notifier.Alert) string {
-		qFn := func(query string) ([]datasource.Metric, error) {
+		qFn := func(_ string) ([]datasource.Metric, error) {
 			return nil, fmt.Errorf("`query` template isn't supported for alert source template")
 		}
 		templated, err := alert.ExecTemplate(qFn, alert.Labels, m)
diff --git a/app/vmalert/notifier/alert_test.go b/app/vmalert/notifier/alert_test.go
index c2a51c3042..b02678dcea 100644
--- a/app/vmalert/notifier/alert_test.go
+++ b/app/vmalert/notifier/alert_test.go
@@ -178,7 +178,7 @@ func TestAlert_ExecTemplate(t *testing.T) {
 		},
 	}
 
-	qFn := func(q string) ([]datasource.Metric, error) {
+	qFn := func(_ string) ([]datasource.Metric, error) {
 		return []datasource.Metric{
 			{
 				Labels: []datasource.Label{
diff --git a/app/vmalert/templates/template.go b/app/vmalert/templates/template.go
index 08d81b00e5..8bbdd4c60c 100644
--- a/app/vmalert/templates/template.go
+++ b/app/vmalert/templates/template.go
@@ -476,7 +476,7 @@ func templateFuncs() textTpl.FuncMap {
 		// For example, {{ query "foo" | first | value }} will
 		// execute "/api/v1/query?query=foo" request and will return
 		// the first value in response.
-		"query": func(q string) ([]metric, error) {
+		"query": func(_ string) ([]metric, error) {
 			// query function supposed to be substituted at FuncsWithQuery().
 			// it is present here only for validation purposes, when there is no
 			// provided datasource.
diff --git a/app/vmalert/web_test.go b/app/vmalert/web_test.go
index 951e92e96e..1eeef7455a 100644
--- a/app/vmalert/web_test.go
+++ b/app/vmalert/web_test.go
@@ -60,7 +60,7 @@ func TestHandler(t *testing.T) {
 	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { rh.handler(w, r) }))
 	defer ts.Close()
 
-	t.Run("/", func(t *testing.T) {
+	t.Run("/", func(_ *testing.T) {
 		getResp(ts.URL, nil, 200)
 		getResp(ts.URL+"/vmalert", nil, 200)
 		getResp(ts.URL+"/vmalert/alerts", nil, 200)
@@ -69,19 +69,19 @@ func TestHandler(t *testing.T) {
 		getResp(ts.URL+"/rules", nil, 200)
 	})
 
-	t.Run("/vmalert/rule", func(t *testing.T) {
+	t.Run("/vmalert/rule", func(_ *testing.T) {
 		a := ar.ToAPI()
 		getResp(ts.URL+"/vmalert/"+a.WebLink(), nil, 200)
 		r := rr.ToAPI()
 		getResp(ts.URL+"/vmalert/"+r.WebLink(), nil, 200)
 	})
-	t.Run("/vmalert/alert", func(t *testing.T) {
+	t.Run("/vmalert/alert", func(_ *testing.T) {
 		alerts := ar.AlertsToAPI()
 		for _, a := range alerts {
 			getResp(ts.URL+"/vmalert/"+a.WebLink(), nil, 200)
 		}
 	})
-	t.Run("/vmalert/rule?badParam", func(t *testing.T) {
+	t.Run("/vmalert/rule?badParam", func(_ *testing.T) {
 		params := fmt.Sprintf("?%s=0&%s=1", paramGroupID, paramRuleID)
 		getResp(ts.URL+"/vmalert/rule"+params, nil, 404)
 
@@ -117,7 +117,7 @@ func TestHandler(t *testing.T) {
 		}
 	})
 
-	t.Run("/api/v1/alert?badParams", func(t *testing.T) {
+	t.Run("/api/v1/alert?badParams", func(_ *testing.T) {
 		params := fmt.Sprintf("?%s=0&%s=1", paramGroupID, paramAlertID)
 		getResp(ts.URL+"/api/v1/alert"+params, nil, 404)
 		getResp(ts.URL+"/vmalert/api/v1/alert"+params, nil, 404)
diff --git a/app/vmctl/main.go b/app/vmctl/main.go
index 73d9827ffe..12f98d30bc 100644
--- a/app/vmctl/main.go
+++ b/app/vmctl/main.go
@@ -284,7 +284,7 @@ func main() {
 					}
 					defer f.Close()
 					var blocksCount uint64
-					if err := stream.Parse(f, isBlockGzipped, func(block *stream.Block) error {
+					if err := stream.Parse(f, isBlockGzipped, func(_ *stream.Block) error {
 						atomic.AddUint64(&blocksCount, 1)
 						return nil
 					}); err != nil {
diff --git a/app/vminsert/common/streamaggr.go b/app/vminsert/common/streamaggr.go
index 4fb07fe34d..ff607670e8 100644
--- a/app/vminsert/common/streamaggr.go
+++ b/app/vminsert/common/streamaggr.go
@@ -49,7 +49,7 @@ func CheckStreamAggrConfig() error {
 	if *streamAggrConfig == "" {
 		return nil
 	}
-	pushNoop := func(tss []prompbmarshal.TimeSeries) {}
+	pushNoop := func(_ []prompbmarshal.TimeSeries) {}
 	sas, err := streamaggr.LoadFromFile(*streamAggrConfig, pushNoop, *streamAggrDedupInterval)
 	if err != nil {
 		return fmt.Errorf("error when loading -streamAggr.config=%q: %w", *streamAggrConfig, err)
diff --git a/app/vminsert/main.go b/app/vminsert/main.go
index 5f0c00f78b..69fc298f9c 100644
--- a/app/vminsert/main.go
+++ b/app/vminsert/main.go
@@ -95,7 +95,7 @@ func Init() {
 	if len(*opentsdbHTTPListenAddr) > 0 {
 		opentsdbhttpServer = opentsdbhttpserver.MustStart(*opentsdbHTTPListenAddr, *opentsdbHTTPUseProxyProtocol, opentsdbhttp.InsertHandler)
 	}
-	promscrape.Init(func(at *auth.Token, wr *prompbmarshal.WriteRequest) {
+	promscrape.Init(func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
 		prompush.Push(wr)
 	})
 }
diff --git a/app/vmselect/graphite/eval.go b/app/vmselect/graphite/eval.go
index 22526ddd19..69c9e39233 100644
--- a/app/vmselect/graphite/eval.go
+++ b/app/vmselect/graphite/eval.go
@@ -160,7 +160,7 @@ func newNextSeriesForSearchQuery(ec *evalConfig, sq *storage.SearchQuery, expr g
 	seriesCh := make(chan *series, cgroup.AvailableCPUs())
 	errCh := make(chan error, 1)
 	go func() {
-		err := rss.RunParallel(nil, func(rs *netstorage.Result, workerID uint) error {
+		err := rss.RunParallel(nil, func(rs *netstorage.Result, _ uint) error {
 			nameWithTags := getCanonicalPath(&rs.MetricName)
 			tags := unmarshalTags(nameWithTags)
 			s := &series{
diff --git a/app/vmselect/graphite/transform.go b/app/vmselect/graphite/transform.go
index 5cfeca59a4..b90f092d28 100644
--- a/app/vmselect/graphite/transform.go
+++ b/app/vmselect/graphite/transform.go
@@ -401,7 +401,7 @@ func aggregateSeriesWithWildcards(ec *evalConfig, expr graphiteql.Expr, nextSeri
 	for _, pos := range positions {
 		positionsMap[pos] = struct{}{}
 	}
-	keyFunc := func(name string, tags map[string]string) string {
+	keyFunc := func(name string, _ map[string]string) string {
 		parts := strings.Split(getPathFromName(name), ".")
 		dstParts := parts[:0]
 		for i, part := range parts {
@@ -1819,7 +1819,7 @@ func transformGroupByTags(ec *evalConfig, fe *graphiteql.FuncExpr) (nextSeriesFu
 	if err != nil {
 		return nil, err
 	}
-	keyFunc := func(name string, tags map[string]string) string {
+	keyFunc := func(_ string, tags map[string]string) string {
 		return formatKeyFromTags(tags, tagKeys, callback)
 	}
 	return groupByKeyFunc(ec, fe, nextSeries, callback, keyFunc)
diff --git a/app/vmselect/prometheus/prometheus.go b/app/vmselect/prometheus/prometheus.go
index 6bb8921b0d..d116023786 100644
--- a/app/vmselect/prometheus/prometheus.go
+++ b/app/vmselect/prometheus/prometheus.go
@@ -225,7 +225,7 @@ func ExportNativeHandler(startTime time.Time, w http.ResponseWriter, r *http.Req
 	_, _ = bw.Write(trBuf)
 
 	// Marshal native blocks.
-	err = netstorage.ExportBlocks(nil, sq, cp.deadline, func(mn *storage.MetricName, b *storage.Block, tr storage.TimeRange, workerID uint) error {
+	err = netstorage.ExportBlocks(nil, sq, cp.deadline, func(mn *storage.MetricName, b *storage.Block, _ storage.TimeRange, workerID uint) error {
 		if err := bw.Error(); err != nil {
 			return err
 		}
@@ -1201,7 +1201,7 @@ func (sw *scalableWriter) maybeFlushBuffer(bb *bytesutil.ByteBuffer) error {
 }
 
 func (sw *scalableWriter) flush() error {
-	sw.m.Range(func(k, v interface{}) bool {
+	sw.m.Range(func(_, v interface{}) bool {
 		bb := v.(*bytesutil.ByteBuffer)
 		_, err := sw.bw.Write(bb.B)
 		return err == nil
diff --git a/app/vmselect/promql/aggr.go b/app/vmselect/promql/aggr.go
index 6ab3b21369..3c5c9904fd 100644
--- a/app/vmselect/promql/aggr.go
+++ b/app/vmselect/promql/aggr.go
@@ -75,7 +75,7 @@ func newAggrFunc(afe func(tss []*timeseries) []*timeseries) aggrFunc {
 		if err != nil {
 			return nil, err
 		}
-		return aggrFuncExt(func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries {
+		return aggrFuncExt(func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 			return afe(tss)
 		}, tss, &afa.ae.Modifier, afa.ae.Limit, false)
 	}
@@ -150,7 +150,7 @@ func aggrFuncAny(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		return tss[:1]
 	}
 	limit := afa.ae.Limit
@@ -459,7 +459,7 @@ func aggrFuncShare(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		for i := range tss[0].Values {
 			// Calculate sum for non-negative points at position i.
 			var sum float64
@@ -490,7 +490,7 @@ func aggrFuncZScore(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		for i := range tss[0].Values {
 			// Calculate avg and stddev for tss points at position i.
 			// See `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation
@@ -586,7 +586,7 @@ func aggrFuncCountValues(afa *aggrFuncArg) ([]*timeseries, error) {
 		// Do nothing
 	}
 
-	afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) ([]*timeseries, error) {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) ([]*timeseries, error) {
 		m := make(map[float64]*timeseries)
 		for _, ts := range tss {
 			for i, v := range ts.Values {
@@ -648,7 +648,7 @@ func newAggrFuncTopK(isReverse bool) aggrFunc {
 		if err != nil {
 			return nil, err
 		}
-		afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries {
+		afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 			for n := range tss[0].Values {
 				lessFunc := lessWithNaNs
 				if isReverse {
@@ -956,7 +956,7 @@ func aggrFuncOutliersMAD(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		// Calculate medians for each point across tss.
 		medians := getPerPointMedians(tss)
 		// Calculate MAD values multiplied by tolerance for each point across tss.
@@ -992,7 +992,7 @@ func aggrFuncOutliersK(afa *aggrFuncArg) ([]*timeseries, error) {
 	if err != nil {
 		return nil, err
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		// Calculate medians for each point across tss.
 		medians := getPerPointMedians(tss)
 		// Return topK time series with the highest variance from median.
@@ -1063,7 +1063,7 @@ func aggrFuncLimitK(afa *aggrFuncArg) ([]*timeseries, error) {
 	if limit < 0 {
 		limit = 0
 	}
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		// Sort series by metricName hash in order to get consistent set of output series
 		// across multiple calls to limitk() function.
 		// Sort series by hash in order to guarantee uniform selection across series.
@@ -1127,7 +1127,7 @@ func aggrFuncQuantiles(afa *aggrFuncArg) ([]*timeseries, error) {
 		phis[i] = phisLocal[0]
 	}
 	argOrig := args[len(args)-1]
-	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	afe := func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		tssDst := make([]*timeseries, len(phiArgs))
 		for j := range tssDst {
 			ts := &timeseries{}
@@ -1184,7 +1184,7 @@ func aggrFuncMedian(afa *aggrFuncArg) ([]*timeseries, error) {
 }
 
 func newAggrQuantileFunc(phis []float64) func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
-	return func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
+	return func(tss []*timeseries, _ *metricsql.ModifierExpr) []*timeseries {
 		dst := tss[0]
 		a := getFloat64s()
 		values := a.A
diff --git a/app/vmselect/promql/binary_op.go b/app/vmselect/promql/binary_op.go
index d020825569..f827811112 100644
--- a/app/vmselect/promql/binary_op.go
+++ b/app/vmselect/promql/binary_op.go
@@ -74,7 +74,7 @@ func newBinaryOpCmpFunc(cf func(left, right float64) bool) binaryOpFunc {
 }
 
 func newBinaryOpArithFunc(af func(left, right float64) float64) binaryOpFunc {
-	afe := func(left, right float64, isBool bool) float64 {
+	afe := func(left, right float64, _ bool) float64 {
 		return af(left, right)
 	}
 	return newBinaryOpFunc(afe)
diff --git a/app/vmselect/promql/exec_test.go b/app/vmselect/promql/exec_test.go
index 386067a317..264c9d7865 100644
--- a/app/vmselect/promql/exec_test.go
+++ b/app/vmselect/promql/exec_test.go
@@ -175,12 +175,12 @@ func TestExecSuccess(t *testing.T) {
 		resultExpected := []netstorage.Result{r}
 		f(q, resultExpected)
 	})
-	t.Run("scalar-string-nonnum", func(t *testing.T) {
+	t.Run("scalar-string-nonnum", func(_ *testing.T) {
 		q := `scalar("fooobar")`
 		resultExpected := []netstorage.Result{}
 		f(q, resultExpected)
 	})
-	t.Run("scalar-string-num", func(t *testing.T) {
+	t.Run("scalar-string-num", func(_ *testing.T) {
 		q := `scalar("-12.34")`
 		r := netstorage.Result{
 			MetricName: metricNameExpected,
diff --git a/app/vmselect/promql/rollup.go b/app/vmselect/promql/rollup.go
index cf524f90b7..35a626358b 100644
--- a/app/vmselect/promql/rollup.go
+++ b/app/vmselect/promql/rollup.go
@@ -326,10 +326,10 @@ func getRollupTag(expr metricsql.Expr) (string, error) {
 func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start, end, step int64, maxPointsPerSeries int,
 	window, lookbackDelta int64, sharedTimestamps []int64) (
 	func(values []float64, timestamps []int64), []*rollupConfig, error) {
-	preFunc := func(values []float64, timestamps []int64) {}
+	preFunc := func(_ []float64, _ []int64) {}
 	funcName = strings.ToLower(funcName)
 	if rollupFuncsRemoveCounterResets[funcName] {
-		preFunc = func(values []float64, timestamps []int64) {
+		preFunc = func(values []float64, _ []int64) {
 			removeCounterResets(values)
 		}
 	}
@@ -441,7 +441,7 @@ func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start
 		for _, aggrFuncName := range aggrFuncNames {
 			if rollupFuncsRemoveCounterResets[aggrFuncName] {
 				// There is no need to save the previous preFunc, since it is either empty or the same.
-				preFunc = func(values []float64, timestamps []int64) {
+				preFunc = func(values []float64, _ []int64) {
 					removeCounterResets(values)
 				}
 			}
diff --git a/app/vmselect/promql/rollup_result_cache_test.go b/app/vmselect/promql/rollup_result_cache_test.go
index d72a9f8d4e..145ed7a59c 100644
--- a/app/vmselect/promql/rollup_result_cache_test.go
+++ b/app/vmselect/promql/rollup_result_cache_test.go
@@ -9,13 +9,13 @@ import (
 )
 
 func TestRollupResultCacheInitStop(t *testing.T) {
-	t.Run("inmemory", func(t *testing.T) {
+	t.Run("inmemory", func(_ *testing.T) {
 		for i := 0; i < 5; i++ {
 			InitRollupResultCache("")
 			StopRollupResultCache()
 		}
 	})
-	t.Run("file-based", func(t *testing.T) {
+	t.Run("file-based", func(_ *testing.T) {
 		cacheFilePath := "test-rollup-result-cache"
 		for i := 0; i < 3; i++ {
 			InitRollupResultCache(cacheFilePath)
diff --git a/app/vmselect/promql/transform.go b/app/vmselect/promql/transform.go
index eaccde4529..1f63b6d616 100644
--- a/app/vmselect/promql/transform.go
+++ b/app/vmselect/promql/transform.go
@@ -910,7 +910,7 @@ func transformHistogramQuantile(tfa *transformFuncArg) ([]*timeseries, error) {
 	m := groupLeTimeseries(tss)
 
 	// Calculate quantile for each group in m
-	lastNonInf := func(i int, xss []leTimeseries) float64 {
+	lastNonInf := func(_ int, xss []leTimeseries) float64 {
 		for len(xss) > 0 {
 			xsLast := xss[len(xss)-1]
 			if !math.IsInf(xsLast.le, 0) {
diff --git a/lib/httpserver/httpserver.go b/lib/httpserver/httpserver.go
index 43c1cde341..58cd372857 100644
--- a/lib/httpserver/httpserver.go
+++ b/lib/httpserver/httpserver.go
@@ -81,7 +81,7 @@ type RequestHandler func(w http.ResponseWriter, r *http.Request) bool
 // See https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt
 func Serve(addr string, useProxyProtocol bool, rh RequestHandler) {
 	if rh == nil {
-		rh = func(w http.ResponseWriter, r *http.Request) bool {
+		rh = func(_ http.ResponseWriter, _ *http.Request) bool {
 			return false
 		}
 	}
@@ -126,7 +126,7 @@ func serveWithListener(addr string, ln net.Listener, rh RequestHandler) {
 
 		ErrorLog: logger.StdErrorLogger(),
 
-		ConnContext: func(ctx context.Context, c net.Conn) context.Context {
+		ConnContext: func(ctx context.Context, _ net.Conn) context.Context {
 			timeoutSec := connTimeout.Seconds()
 			// Add a jitter for connection timeout in order to prevent Thundering herd problem
 			// when all the connections are established at the same time.
diff --git a/lib/logstorage/filters_test.go b/lib/logstorage/filters_test.go
index cf7d6e7827..a565e38380 100644
--- a/lib/logstorage/filters_test.go
+++ b/lib/logstorage/filters_test.go
@@ -211,11 +211,11 @@ func TestFilterBitmap(t *testing.T) {
 		})
 
 		// Clear all the bits
-		bm.forEachSetBit(func(idx int) bool {
+		bm.forEachSetBit(func(_ int) bool {
 			return false
 		})
 		bitsCount := 0
-		bm.forEachSetBit(func(idx int) bool {
+		bm.forEachSetBit(func(_ int) bool {
 			bitsCount++
 			return true
 		})
@@ -9226,7 +9226,7 @@ func testFilterMatchForStorage(t *testing.T, s *Storage, tenantID TenantID, f fi
 		resultColumnNames: []string{resultColumnName},
 	}
 	workersCount := 3
-	s.search(workersCount, so, nil, func(workerID uint, br *blockResult) {
+	s.search(workersCount, so, nil, func(_ uint, br *blockResult) {
 		// Verify tenantID
 		if !br.streamID.tenantID.equal(&tenantID) {
 			t.Fatalf("unexpected tenantID in blockResult; got %s; want %s", &br.streamID.tenantID, &tenantID)
diff --git a/lib/logstorage/indexdb_test.go b/lib/logstorage/indexdb_test.go
index 02e0951f00..affef70b65 100644
--- a/lib/logstorage/indexdb_test.go
+++ b/lib/logstorage/indexdb_test.go
@@ -76,58 +76,58 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 			}
 		}
 	})
-	t.Run("missing-job", func(t *testing.T) {
+	t.Run("missing-job", func(_ *testing.T) {
 		f(`{job="non-existing-job",instance="instance-0"}`, nil)
 	})
-	t.Run("missing-job-re", func(t *testing.T) {
+	t.Run("missing-job-re", func(_ *testing.T) {
 		f(`{job=~"non-existing-job|",instance="instance-0"}`, nil)
 	})
-	t.Run("missing-job-negative-re", func(t *testing.T) {
+	t.Run("missing-job-negative-re", func(_ *testing.T) {
 		f(`{job!~"job.+",instance="instance-0"}`, nil)
 	})
-	t.Run("empty-job", func(t *testing.T) {
+	t.Run("empty-job", func(_ *testing.T) {
 		f(`{job="",instance="instance-0"}`, nil)
 	})
-	t.Run("missing-instance", func(t *testing.T) {
+	t.Run("missing-instance", func(_ *testing.T) {
 		f(`{job="job-0",instance="non-existing-instance"}`, nil)
 	})
-	t.Run("missing-instance-re", func(t *testing.T) {
+	t.Run("missing-instance-re", func(_ *testing.T) {
 		f(`{job="job-0",instance=~"non-existing-instance|"}`, nil)
 	})
-	t.Run("missing-instance-negative-re", func(t *testing.T) {
+	t.Run("missing-instance-negative-re", func(_ *testing.T) {
 		f(`{job="job-0",instance!~"instance.+"}`, nil)
 	})
-	t.Run("empty-instance", func(t *testing.T) {
+	t.Run("empty-instance", func(_ *testing.T) {
 		f(`{job="job-0",instance=""}`, nil)
 	})
-	t.Run("non-existing-tag", func(t *testing.T) {
+	t.Run("non-existing-tag", func(_ *testing.T) {
 		f(`{job="job-0",instance="instance-0",non_existing_tag="foobar"}`, nil)
 	})
-	t.Run("non-existing-non-empty-tag", func(t *testing.T) {
+	t.Run("non-existing-non-empty-tag", func(_ *testing.T) {
 		f(`{job="job-0",instance="instance-0",non_existing_tag!=""}`, nil)
 	})
-	t.Run("non-existing-tag-re", func(t *testing.T) {
+	t.Run("non-existing-tag-re", func(_ *testing.T) {
 		f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo.+"}`, nil)
 	})
-	t.Run("non-existing-non-empty-tag-re", func(t *testing.T) {
+	t.Run("non-existing-non-empty-tag-re", func(_ *testing.T) {
 		f(`{job="job-0",instance="instance-0",non_existing_tag!~""}`, nil)
 	})
 
-	t.Run("match-job-instance", func(t *testing.T) {
+	t.Run("match-job-instance", func(_ *testing.T) {
 		sid, _ := getStreamIDForTags(map[string]string{
 			"instance": "instance-0",
 			"job":      "job-0",
 		})
 		f(`{job="job-0",instance="instance-0"}`, []streamID{sid})
 	})
-	t.Run("match-non-existing-tag", func(t *testing.T) {
+	t.Run("match-non-existing-tag", func(_ *testing.T) {
 		sid, _ := getStreamIDForTags(map[string]string{
 			"instance": "instance-0",
 			"job":      "job-0",
 		})
 		f(`{job="job-0",instance="instance-0",non_existing_tag=~"foo|"}`, []streamID{sid})
 	})
-	t.Run("match-job", func(t *testing.T) {
+	t.Run("match-job", func(_ *testing.T) {
 		var streamIDs []streamID
 		for i := 0; i < instancesCount; i++ {
 			sid, _ := getStreamIDForTags(map[string]string{
@@ -138,7 +138,7 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 		}
 		f(`{job="job-0"}`, streamIDs)
 	})
-	t.Run("match-instance", func(t *testing.T) {
+	t.Run("match-instance", func(_ *testing.T) {
 		var streamIDs []streamID
 		for i := 0; i < jobsCount; i++ {
 			sid, _ := getStreamIDForTags(map[string]string{
@@ -149,7 +149,7 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 		}
 		f(`{instance="instance-1"}`, streamIDs)
 	})
-	t.Run("match-re", func(t *testing.T) {
+	t.Run("match-re", func(_ *testing.T) {
 		var streamIDs []streamID
 		for _, instanceID := range []int{3, 1} {
 			for _, jobID := range []int{0, 2} {
@@ -162,7 +162,7 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 		}
 		f(`{job=~"job-(0|2)",instance=~"instance-[13]"}`, streamIDs)
 	})
-	t.Run("match-re-empty-match", func(t *testing.T) {
+	t.Run("match-re-empty-match", func(_ *testing.T) {
 		var streamIDs []streamID
 		for _, instanceID := range []int{3, 1} {
 			for _, jobID := range []int{0, 2} {
@@ -175,7 +175,7 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 		}
 		f(`{job=~"job-(0|2)|",instance=~"instance-[13]"}`, streamIDs)
 	})
-	t.Run("match-negative-re", func(t *testing.T) {
+	t.Run("match-negative-re", func(_ *testing.T) {
 		var instanceIDs []int
 		for i := 0; i < instancesCount; i++ {
 			if i != 0 && i != 1 {
@@ -200,7 +200,7 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 		}
 		f(`{job!~"job-[0-2]",instance!~"instance-(0|1)"}`, streamIDs)
 	})
-	t.Run("match-negative-re-empty-match", func(t *testing.T) {
+	t.Run("match-negative-re-empty-match", func(_ *testing.T) {
 		var instanceIDs []int
 		for i := 0; i < instancesCount; i++ {
 			if i != 0 && i != 1 {
@@ -225,7 +225,7 @@ func TestStorageSearchStreamIDs(t *testing.T) {
 		}
 		f(`{job!~"job-[0-2]",instance!~"instance-(0|1)|"}`, streamIDs)
 	})
-	t.Run("match-negative-job", func(t *testing.T) {
+	t.Run("match-negative-job", func(_ *testing.T) {
 		instanceIDs := []int{2}
 		var jobIDs []int
 		for i := 0; i < jobsCount; i++ {
diff --git a/lib/logstorage/storage_search.go b/lib/logstorage/storage_search.go
index a82b9e4771..769bf2c635 100644
--- a/lib/logstorage/storage_search.go
+++ b/lib/logstorage/storage_search.go
@@ -51,7 +51,7 @@ func (s *Storage) RunQuery(tenantIDs []TenantID, q *Query, stopCh <-chan struct{
 		resultColumnNames: resultColumnNames,
 	}
 	workersCount := cgroup.AvailableCPUs()
-	s.search(workersCount, so, stopCh, func(workerID uint, br *blockResult) {
+	s.search(workersCount, so, stopCh, func(_ uint, br *blockResult) {
 		brs := getBlockRows()
 		cs := brs.cs
 
diff --git a/lib/logstorage/storage_search_test.go b/lib/logstorage/storage_search_test.go
index 63404838ce..b624b244d5 100644
--- a/lib/logstorage/storage_search_test.go
+++ b/lib/logstorage/storage_search_test.go
@@ -78,25 +78,25 @@ func TestStorageRunQuery(t *testing.T) {
 	s.debugFlush()
 
 	// run tests on the storage data
-	t.Run("missing-tenant", func(t *testing.T) {
+	t.Run("missing-tenant", func(_ *testing.T) {
 		q := mustParseQuery(`"log message"`)
 		tenantID := TenantID{
 			AccountID: 0,
 			ProjectID: 0,
 		}
-		processBlock := func(columns []BlockColumn) {
+		processBlock := func(_ []BlockColumn) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		tenantIDs := []TenantID{tenantID}
 		s.RunQuery(tenantIDs, q, nil, processBlock)
 	})
-	t.Run("missing-message-text", func(t *testing.T) {
+	t.Run("missing-message-text", func(_ *testing.T) {
 		q := mustParseQuery(`foobar`)
 		tenantID := TenantID{
 			AccountID: 1,
 			ProjectID: 11,
 		}
-		processBlock := func(columns []BlockColumn) {
+		processBlock := func(_ []BlockColumn) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		tenantIDs := []TenantID{tenantID}
@@ -168,9 +168,9 @@ func TestStorageRunQuery(t *testing.T) {
 			t.Fatalf("unexpected number of matching rows; got %d; want %d", rowsCount, expectedRowsCount)
 		}
 	})
-	t.Run("stream-filter-mismatch", func(t *testing.T) {
+	t.Run("stream-filter-mismatch", func(_ *testing.T) {
 		q := mustParseQuery(`_stream:{job="foobar",instance=~"host-.+:2345"} log`)
-		processBlock := func(columns []BlockColumn) {
+		processBlock := func(_ []BlockColumn) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		s.RunQuery(allTenantIDs, q, nil, processBlock)
@@ -273,7 +273,7 @@ func TestStorageRunQuery(t *testing.T) {
 			t.Fatalf("unexpected number of rows; got %d; want %d", rowsCount, expectedRowsCount)
 		}
 	})
-	t.Run("matching-stream-id-missing-time-range", func(t *testing.T) {
+	t.Run("matching-stream-id-missing-time-range", func(_ *testing.T) {
 		minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9
 		maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9
 		q := mustParseQuery(fmt.Sprintf(`_stream:{job="foobar",instance="host-1:234"} _time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9))
@@ -281,13 +281,13 @@ func TestStorageRunQuery(t *testing.T) {
 			AccountID: 1,
 			ProjectID: 11,
 		}
-		processBlock := func(columns []BlockColumn) {
+		processBlock := func(_ []BlockColumn) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		tenantIDs := []TenantID{tenantID}
 		s.RunQuery(tenantIDs, q, nil, processBlock)
 	})
-	t.Run("missing-time-range", func(t *testing.T) {
+	t.Run("missing-time-range", func(_ *testing.T) {
 		minTimestamp := baseTimestamp + (rowsPerBlock+1)*1e9
 		maxTimestamp := baseTimestamp + (rowsPerBlock+2)*1e9
 		q := mustParseQuery(fmt.Sprintf(`_time:[%d, %d)`, minTimestamp/1e9, maxTimestamp/1e9))
@@ -295,7 +295,7 @@ func TestStorageRunQuery(t *testing.T) {
 			AccountID: 1,
 			ProjectID: 11,
 		}
-		processBlock := func(columns []BlockColumn) {
+		processBlock := func(_ []BlockColumn) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		tenantIDs := []TenantID{tenantID}
@@ -392,7 +392,7 @@ func TestStorageSearch(t *testing.T) {
 		}
 	}
 
-	t.Run("missing-tenant-smaller-than-existing", func(t *testing.T) {
+	t.Run("missing-tenant-smaller-than-existing", func(_ *testing.T) {
 		tenantID := TenantID{
 			AccountID: 0,
 			ProjectID: 0,
@@ -405,12 +405,12 @@ func TestStorageSearch(t *testing.T) {
 			filter:            f,
 			resultColumnNames: []string{"_msg"},
 		}
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, _ *blockResult) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		s.search(workersCount, so, nil, processBlock)
 	})
-	t.Run("missing-tenant-bigger-than-existing", func(t *testing.T) {
+	t.Run("missing-tenant-bigger-than-existing", func(_ *testing.T) {
 		tenantID := TenantID{
 			AccountID: tenantsCount + 1,
 			ProjectID: 0,
@@ -423,12 +423,12 @@ func TestStorageSearch(t *testing.T) {
 			filter:            f,
 			resultColumnNames: []string{"_msg"},
 		}
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, _ *blockResult) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		s.search(workersCount, so, nil, processBlock)
 	})
-	t.Run("missing-tenant-middle", func(t *testing.T) {
+	t.Run("missing-tenant-middle", func(_ *testing.T) {
 		tenantID := TenantID{
 			AccountID: 1,
 			ProjectID: 0,
@@ -441,7 +441,7 @@ func TestStorageSearch(t *testing.T) {
 			filter:            f,
 			resultColumnNames: []string{"_msg"},
 		}
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, _ *blockResult) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		s.search(workersCount, so, nil, processBlock)
@@ -461,7 +461,7 @@ func TestStorageSearch(t *testing.T) {
 				resultColumnNames: []string{"_msg"},
 			}
 			rowsCount := uint32(0)
-			processBlock := func(workerID uint, br *blockResult) {
+			processBlock := func(_ uint, br *blockResult) {
 				if !br.streamID.tenantID.equal(&tenantID) {
 					panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
 				}
@@ -485,7 +485,7 @@ func TestStorageSearch(t *testing.T) {
 			resultColumnNames: []string{"_msg"},
 		}
 		rowsCount := uint32(0)
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, br *blockResult) {
 			atomic.AddUint32(&rowsCount, uint32(br.RowsCount()))
 		}
 		s.search(workersCount, so, nil, processBlock)
@@ -495,7 +495,7 @@ func TestStorageSearch(t *testing.T) {
 			t.Fatalf("unexpected number of matching rows; got %d; want %d", rowsCount, expectedRowsCount)
 		}
 	})
-	t.Run("stream-filter-mismatch", func(t *testing.T) {
+	t.Run("stream-filter-mismatch", func(_ *testing.T) {
 		sf := mustNewStreamFilter(`{job="foobar",instance=~"host-.+:2345"}`)
 		minTimestamp := baseTimestamp
 		maxTimestamp := baseTimestamp + rowsPerBlock*1e9 + blocksPerStream
@@ -505,7 +505,7 @@ func TestStorageSearch(t *testing.T) {
 			filter:            f,
 			resultColumnNames: []string{"_msg"},
 		}
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, _ *blockResult) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		s.search(workersCount, so, nil, processBlock)
@@ -526,7 +526,7 @@ func TestStorageSearch(t *testing.T) {
 				resultColumnNames: []string{"_msg"},
 			}
 			rowsCount := uint32(0)
-			processBlock := func(workerID uint, br *blockResult) {
+			processBlock := func(_ uint, br *blockResult) {
 				if !br.streamID.tenantID.equal(&tenantID) {
 					panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
 				}
@@ -555,7 +555,7 @@ func TestStorageSearch(t *testing.T) {
 			resultColumnNames: []string{"_msg"},
 		}
 		rowsCount := uint32(0)
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, br *blockResult) {
 			if !br.streamID.tenantID.equal(&tenantID) {
 				panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
 			}
@@ -592,7 +592,7 @@ func TestStorageSearch(t *testing.T) {
 			resultColumnNames: []string{"_msg"},
 		}
 		rowsCount := uint32(0)
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, br *blockResult) {
 			if !br.streamID.tenantID.equal(&tenantID) {
 				panic(fmt.Errorf("unexpected tenantID; got %s; want %s", &br.streamID.tenantID, &tenantID))
 			}
@@ -620,7 +620,7 @@ func TestStorageSearch(t *testing.T) {
 			resultColumnNames: []string{"_msg"},
 		}
 		rowsCount := uint32(0)
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, br *blockResult) {
 			atomic.AddUint32(&rowsCount, uint32(br.RowsCount()))
 		}
 		s.search(workersCount, so, nil, processBlock)
@@ -630,7 +630,7 @@ func TestStorageSearch(t *testing.T) {
 			t.Fatalf("unexpected number of rows; got %d; want %d", rowsCount, expectedRowsCount)
 		}
 	})
-	t.Run("matching-stream-id-missing-time-range", func(t *testing.T) {
+	t.Run("matching-stream-id-missing-time-range", func(_ *testing.T) {
 		sf := mustNewStreamFilter(`{job="foobar",instance="host-1:234"}`)
 		tenantID := TenantID{
 			AccountID: 1,
@@ -644,7 +644,7 @@ func TestStorageSearch(t *testing.T) {
 			filter:            f,
 			resultColumnNames: []string{"_msg"},
 		}
-		processBlock := func(workerID uint, br *blockResult) {
+		processBlock := func(_ uint, _ *blockResult) {
 			panic(fmt.Errorf("unexpected match"))
 		}
 		s.search(workersCount, so, nil, processBlock)
diff --git a/lib/netutil/tls.go b/lib/netutil/tls.go
index 03cc671aeb..e1e0ccf574 100644
--- a/lib/netutil/tls.go
+++ b/lib/netutil/tls.go
@@ -32,7 +32,7 @@ func GetServerTLSConfig(tlsCertFile, tlsKeyFile, tlsMinVersion string, tlsCipher
 		MinVersion: minVersion,
 		// Do not set MaxVersion, since this has no sense from security PoV.
 		// This can only result in lower security level if improperly set.
-		GetCertificate: func(info *tls.ClientHelloInfo) (*tls.Certificate, error) {
+		GetCertificate: func(_ *tls.ClientHelloInfo) (*tls.Certificate, error) {
 			certLock.Lock()
 			defer certLock.Unlock()
 			if fasttime.UnixTimestamp() > certDeadline {
diff --git a/lib/persistentqueue/persistentqueue_test.go b/lib/persistentqueue/persistentqueue_test.go
index 2099d9e751..b518d625a7 100644
--- a/lib/persistentqueue/persistentqueue_test.go
+++ b/lib/persistentqueue/persistentqueue_test.go
@@ -22,7 +22,7 @@ func TestQueueOpenClose(t *testing.T) {
 }
 
 func TestQueueOpen(t *testing.T) {
-	t.Run("invalid-metainfo", func(t *testing.T) {
+	t.Run("invalid-metainfo", func(_ *testing.T) {
 		path := "queue-open-invalid-metainfo"
 		mustCreateDir(path)
 		mustCreateFile(filepath.Join(path, metainfoFilename), "foobarbaz")
@@ -30,7 +30,7 @@ func TestQueueOpen(t *testing.T) {
 		q.MustClose()
 		mustDeleteDir(path)
 	})
-	t.Run("junk-files-and-dirs", func(t *testing.T) {
+	t.Run("junk-files-and-dirs", func(_ *testing.T) {
 		path := "queue-open-junk-files-and-dir"
 		mustCreateDir(path)
 		mustCreateEmptyMetainfo(path, "foobar")
@@ -40,7 +40,7 @@ func TestQueueOpen(t *testing.T) {
 		q.MustClose()
 		mustDeleteDir(path)
 	})
-	t.Run("invalid-chunk-offset", func(t *testing.T) {
+	t.Run("invalid-chunk-offset", func(_ *testing.T) {
 		path := "queue-open-invalid-chunk-offset"
 		mustCreateDir(path)
 		mustCreateEmptyMetainfo(path, "foobar")
@@ -49,7 +49,7 @@ func TestQueueOpen(t *testing.T) {
 		q.MustClose()
 		mustDeleteDir(path)
 	})
-	t.Run("too-new-chunk", func(t *testing.T) {
+	t.Run("too-new-chunk", func(_ *testing.T) {
 		path := "queue-open-too-new-chunk"
 		mustCreateDir(path)
 		mustCreateEmptyMetainfo(path, "foobar")
@@ -88,7 +88,7 @@ func TestQueueOpen(t *testing.T) {
 		q.MustClose()
 		mustDeleteDir(path)
 	})
-	t.Run("metainfo-dir", func(t *testing.T) {
+	t.Run("metainfo-dir", func(_ *testing.T) {
 		path := "queue-open-metainfo-dir"
 		mustCreateDir(path)
 		mustCreateDir(filepath.Join(path, metainfoFilename))
@@ -112,7 +112,7 @@ func TestQueueOpen(t *testing.T) {
 		q.MustClose()
 		mustDeleteDir(path)
 	})
-	t.Run("invalid-writer-file-size", func(t *testing.T) {
+	t.Run("invalid-writer-file-size", func(_ *testing.T) {
 		path := "too-small-reader-file"
 		mustCreateDir(path)
 		mustCreateEmptyMetainfo(path, "foobar")
diff --git a/lib/promrelabel/config_test.go b/lib/promrelabel/config_test.go
index 41f4327392..6aa7b73075 100644
--- a/lib/promrelabel/config_test.go
+++ b/lib/promrelabel/config_test.go
@@ -101,10 +101,10 @@ func TestLoadRelabelConfigsFailure(t *testing.T) {
 			t.Fatalf("unexpected non-empty rcs: %#v", rcs)
 		}
 	}
-	t.Run("non-existing-file", func(t *testing.T) {
+	t.Run("non-existing-file", func(_ *testing.T) {
 		f("testdata/non-exsiting-file")
 	})
-	t.Run("invalid-file", func(t *testing.T) {
+	t.Run("invalid-file", func(_ *testing.T) {
 		f("testdata/invalid_config.yml")
 	})
 }
@@ -209,7 +209,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			t.Fatalf("unexpected non-empty pcs: %#v", pcs)
 		}
 	}
-	t.Run("invalid-regex", func(t *testing.T) {
+	t.Run("invalid-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				SourceLabels: []string{"aaa"},
@@ -220,7 +220,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("replace-missing-target-label", func(t *testing.T) {
+	t.Run("replace-missing-target-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "replace",
@@ -228,7 +228,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("replace_all-missing-source-labels", func(t *testing.T) {
+	t.Run("replace_all-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:      "replace_all",
@@ -236,7 +236,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("replace_all-missing-target-label", func(t *testing.T) {
+	t.Run("replace_all-missing-target-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "replace_all",
@@ -244,21 +244,21 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("keep-missing-source-labels", func(t *testing.T) {
+	t.Run("keep-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "keep",
 			},
 		})
 	})
-	t.Run("keep_if_equal-missing-source-labels", func(t *testing.T) {
+	t.Run("keep_if_equal-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "keep_if_equal",
 			},
 		})
 	})
-	t.Run("keep_if_equal-single-source-label", func(t *testing.T) {
+	t.Run("keep_if_equal-single-source-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "keep_if_equal",
@@ -266,7 +266,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("keep_if_equal-unused-target-label", func(t *testing.T) {
+	t.Run("keep_if_equal-unused-target-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "keep_if_equal",
@@ -275,7 +275,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("keep_if_equal-unused-regex", func(t *testing.T) {
+	t.Run("keep_if_equal-unused-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "keep_if_equal",
@@ -286,14 +286,14 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("drop_if_equal-missing-source-labels", func(t *testing.T) {
+	t.Run("drop_if_equal-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "drop_if_equal",
 			},
 		})
 	})
-	t.Run("drop_if_equal-single-source-label", func(t *testing.T) {
+	t.Run("drop_if_equal-single-source-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "drop_if_equal",
@@ -301,7 +301,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("drop_if_equal-unused-target-label", func(t *testing.T) {
+	t.Run("drop_if_equal-unused-target-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "drop_if_equal",
@@ -310,7 +310,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("drop_if_equal-unused-regex", func(t *testing.T) {
+	t.Run("drop_if_equal-unused-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "drop_if_equal",
@@ -321,14 +321,14 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("keepequal-missing-source-labels", func(t *testing.T) {
+	t.Run("keepequal-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "keepequal",
 			},
 		})
 	})
-	t.Run("keepequal-missing-target-label", func(t *testing.T) {
+	t.Run("keepequal-missing-target-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "keepequal",
@@ -336,7 +336,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("keepequal-unused-regex", func(t *testing.T) {
+	t.Run("keepequal-unused-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "keepequal",
@@ -348,14 +348,14 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("dropequal-missing-source-labels", func(t *testing.T) {
+	t.Run("dropequal-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "dropequal",
 			},
 		})
 	})
-	t.Run("dropequal-missing-target-label", func(t *testing.T) {
+	t.Run("dropequal-missing-target-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "dropequal",
@@ -363,7 +363,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("dropequal-unused-regex", func(t *testing.T) {
+	t.Run("dropequal-unused-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "dropequal",
@@ -375,14 +375,14 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("drop-missing-source-labels", func(t *testing.T) {
+	t.Run("drop-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "drop",
 			},
 		})
 	})
-	t.Run("hashmod-missing-source-labels", func(t *testing.T) {
+	t.Run("hashmod-missing-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:      "hashmod",
@@ -391,7 +391,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("hashmod-missing-target-label", func(t *testing.T) {
+	t.Run("hashmod-missing-target-label", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "hashmod",
@@ -400,7 +400,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("hashmod-missing-modulus", func(t *testing.T) {
+	t.Run("hashmod-missing-modulus", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "hashmod",
@@ -409,21 +409,21 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("invalid-action", func(t *testing.T) {
+	t.Run("invalid-action", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "invalid-action",
 			},
 		})
 	})
-	t.Run("drop_metrics-missing-regex", func(t *testing.T) {
+	t.Run("drop_metrics-missing-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "drop_metrics",
 			},
 		})
 	})
-	t.Run("drop_metrics-non-empty-source-labels", func(t *testing.T) {
+	t.Run("drop_metrics-non-empty-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "drop_metrics",
@@ -434,14 +434,14 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("keep_metrics-missing-regex", func(t *testing.T) {
+	t.Run("keep_metrics-missing-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "keep_metrics",
 			},
 		})
 	})
-	t.Run("keep_metrics-non-empty-source-labels", func(t *testing.T) {
+	t.Run("keep_metrics-non-empty-source-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "keep_metrics",
@@ -452,7 +452,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("uppercase-missing-sourceLabels", func(t *testing.T) {
+	t.Run("uppercase-missing-sourceLabels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:      "uppercase",
@@ -460,7 +460,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("lowercase-missing-targetLabel", func(t *testing.T) {
+	t.Run("lowercase-missing-targetLabel", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "lowercase",
@@ -468,7 +468,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("graphite-missing-match", func(t *testing.T) {
+	t.Run("graphite-missing-match", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "graphite",
@@ -478,7 +478,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("graphite-missing-labels", func(t *testing.T) {
+	t.Run("graphite-missing-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "graphite",
@@ -486,7 +486,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("graphite-superflouous-sourceLabels", func(t *testing.T) {
+	t.Run("graphite-superflouous-sourceLabels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "graphite",
@@ -498,7 +498,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("graphite-superflouous-targetLabel", func(t *testing.T) {
+	t.Run("graphite-superflouous-targetLabel", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "graphite",
@@ -511,7 +511,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 		})
 	})
 	replacement := "foo"
-	t.Run("graphite-superflouous-replacement", func(t *testing.T) {
+	t.Run("graphite-superflouous-replacement", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "graphite",
@@ -524,7 +524,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 		})
 	})
 	var re MultiLineRegex
-	t.Run("graphite-superflouous-regex", func(t *testing.T) {
+	t.Run("graphite-superflouous-regex", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action: "graphite",
@@ -536,7 +536,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("non-graphite-superflouos-match", func(t *testing.T) {
+	t.Run("non-graphite-superflouos-match", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "uppercase",
@@ -546,7 +546,7 @@ func TestParseRelabelConfigsFailure(t *testing.T) {
 			},
 		})
 	})
-	t.Run("non-graphite-superflouos-labels", func(t *testing.T) {
+	t.Run("non-graphite-superflouos-labels", func(_ *testing.T) {
 		f([]RelabelConfig{
 			{
 				Action:       "uppercase",
diff --git a/lib/promrelabel/relabel_test.go b/lib/promrelabel/relabel_test.go
index df8080af48..7bc26739cc 100644
--- a/lib/promrelabel/relabel_test.go
+++ b/lib/promrelabel/relabel_test.go
@@ -181,14 +181,14 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
 			t.Fatalf("unexpected result; got\n%s\nwant\n%s", result, resultExpected)
 		}
 	}
-	t.Run("empty_relabel_configs", func(t *testing.T) {
+	t.Run("empty_relabel_configs", func(_ *testing.T) {
 		f("", `{}`, false, `{}`)
 		f("", `{}`, true, `{}`)
 		f("", `{foo="bar"}`, false, `{foo="bar"}`)
 		f("", `xxx{foo="bar",__aaa="yyy"}`, false, `xxx{__aaa="yyy",foo="bar"}`)
 		f("", `xxx{foo="bar",__aaa="yyy"}`, true, `xxx{foo="bar"}`)
 	})
-	t.Run("replace-miss", func(t *testing.T) {
+	t.Run("replace-miss", func(_ *testing.T) {
 		f(`
 - action: replace
   target_label: bar
@@ -216,7 +216,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: ".+"
 `, `{xxx="yyy"}`, false, `{xxx="yyy"}`)
 	})
-	t.Run("replace-if-miss", func(t *testing.T) {
+	t.Run("replace-if-miss", func(_ *testing.T) {
 		f(`
 - action: replace
   if: '{foo="bar"}'
@@ -225,7 +225,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "a-$1-b"
 `, `{xxx="yyy"}`, false, `{xxx="yyy"}`)
 	})
-	t.Run("replace-hit", func(t *testing.T) {
+	t.Run("replace-hit", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["xxx", "foo"]
@@ -243,7 +243,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   target_label: "xxx"
 `, `{xxx="yyy"}`, false, `{}`)
 	})
-	t.Run("replace-if-hit", func(t *testing.T) {
+	t.Run("replace-if-hit", func(_ *testing.T) {
 		f(`
 - action: replace
   if: '{xxx=~".y."}'
@@ -252,7 +252,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "a-$1-b"
 `, `{xxx="yyy"}`, false, `{bar="a-yyy;-b",xxx="yyy"}`)
 	})
-	t.Run("replace-remove-label-value-hit", func(t *testing.T) {
+	t.Run("replace-remove-label-value-hit", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["foo"]
@@ -261,7 +261,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: ""
 `, `{foo="xxx",bar="baz"}`, false, `{bar="baz"}`)
 	})
-	t.Run("replace-remove-label-value-miss", func(t *testing.T) {
+	t.Run("replace-remove-label-value-miss", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["foo"]
@@ -270,7 +270,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: ""
 `, `{foo="yyy",bar="baz"}`, false, `{bar="baz",foo="yyy"}`)
 	})
-	t.Run("replace-hit-remove-label", func(t *testing.T) {
+	t.Run("replace-hit-remove-label", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["xxx", "foo"]
@@ -279,7 +279,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: ""
 `, `{xxx="yyy",foo="bar"}`, false, `{xxx="yyy"}`)
 	})
-	t.Run("replace-miss-remove-label", func(t *testing.T) {
+	t.Run("replace-miss-remove-label", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["xxx", "foo"]
@@ -288,7 +288,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: ""
 `, `{xxx="yyyz",foo="bar"}`, false, `{foo="bar",xxx="yyyz"}`)
 	})
-	t.Run("replace-hit-target-label-with-capture-group", func(t *testing.T) {
+	t.Run("replace-hit-target-label-with-capture-group", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["xxx", "foo"]
@@ -296,7 +296,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "a-$1-b"
 `, `{xxx="yyy"}`, false, `{bar-yyy;="a-yyy;-b",xxx="yyy"}`)
 	})
-	t.Run("replace_all-miss", func(t *testing.T) {
+	t.Run("replace_all-miss", func(_ *testing.T) {
 		f(`
 - action: replace_all
   source_labels: [foo]
@@ -319,7 +319,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: ".+"
 `, `{xxx="yyy"}`, false, `{xxx="yyy"}`)
 	})
-	t.Run("replace_all-if-miss", func(t *testing.T) {
+	t.Run("replace_all-if-miss", func(_ *testing.T) {
 		f(`
 - action: replace_all
   if: 'foo'
@@ -329,7 +329,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "."
 `, `{xxx="a-b-c"}`, false, `{xxx="a-b-c"}`)
 	})
-	t.Run("replace_all-hit", func(t *testing.T) {
+	t.Run("replace_all-hit", func(_ *testing.T) {
 		f(`
 - action: replace_all
   source_labels: ["xxx"]
@@ -338,7 +338,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "."
 `, `{xxx="a-b-c"}`, false, `{xxx="a.b.c"}`)
 	})
-	t.Run("replace_all-if-hit", func(t *testing.T) {
+	t.Run("replace_all-if-hit", func(_ *testing.T) {
 		f(`
 - action: replace_all
   if: '{non_existing_label=~".*"}'
@@ -348,7 +348,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "."
 `, `{xxx="a-b-c"}`, false, `{xxx="a.b.c"}`)
 	})
-	t.Run("replace_all-regex-hit", func(t *testing.T) {
+	t.Run("replace_all-regex-hit", func(_ *testing.T) {
 		f(`
 - action: replace_all
   source_labels: ["xxx", "foo"]
@@ -357,7 +357,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "-$1-"
 `, `{xxx="y;y"}`, false, `{xxx="y-;-y-;-"}`)
 	})
-	t.Run("replace-add-multi-labels", func(t *testing.T) {
+	t.Run("replace-add-multi-labels", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["xxx"]
@@ -369,7 +369,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "b-$1"
 `, `{xxx="yyy",instance="a.bc"}`, true, `{bar="a-yyy",instance="a.bc",xxx="yyy",zar="b-a-yyy"}`)
 	})
-	t.Run("replace-self", func(t *testing.T) {
+	t.Run("replace-self", func(_ *testing.T) {
 		f(`
 - action: replace
   source_labels: ["foo"]
@@ -377,14 +377,14 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "a-$1"
 `, `{foo="aaxx"}`, true, `{foo="a-aaxx"}`)
 	})
-	t.Run("replace-missing-source", func(t *testing.T) {
+	t.Run("replace-missing-source", func(_ *testing.T) {
 		f(`
 - action: replace
   target_label: foo
   replacement: "foobar"
 `, `{}`, true, `{foo="foobar"}`)
 	})
-	t.Run("keep_if_equal-miss", func(t *testing.T) {
+	t.Run("keep_if_equal-miss", func(_ *testing.T) {
 		f(`
 - action: keep_if_equal
   source_labels: ["foo", "bar"]
@@ -394,13 +394,13 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   source_labels: ["xxx", "bar"]
 `, `{xxx="yyy"}`, true, `{}`)
 	})
-	t.Run("keep_if_equal-hit", func(t *testing.T) {
+	t.Run("keep_if_equal-hit", func(_ *testing.T) {
 		f(`
 - action: keep_if_equal
   source_labels: ["xxx", "bar"]
 `, `{xxx="yyy",bar="yyy"}`, true, `{bar="yyy",xxx="yyy"}`)
 	})
-	t.Run("drop_if_equal-miss", func(t *testing.T) {
+	t.Run("drop_if_equal-miss", func(_ *testing.T) {
 		f(`
 - action: drop_if_equal
   source_labels: ["foo", "bar"]
@@ -410,41 +410,41 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   source_labels: ["xxx", "bar"]
 `, `{xxx="yyy"}`, true, `{xxx="yyy"}`)
 	})
-	t.Run("drop_if_equal-hit", func(t *testing.T) {
+	t.Run("drop_if_equal-hit", func(_ *testing.T) {
 		f(`
 - action: drop_if_equal
   source_labels: [xxx, bar]
 `, `{xxx="yyy",bar="yyy"}`, true, `{}`)
 	})
-	t.Run("keepequal-hit", func(t *testing.T) {
+	t.Run("keepequal-hit", func(_ *testing.T) {
 		f(`
 - action: keepequal
   source_labels: [foo]
   target_label: bar
 `, `{foo="a",bar="a"}`, true, `{bar="a",foo="a"}`)
 	})
-	t.Run("keepequal-miss", func(t *testing.T) {
+	t.Run("keepequal-miss", func(_ *testing.T) {
 		f(`
 - action: keepequal
   source_labels: [foo]
   target_label: bar
 `, `{foo="a",bar="x"}`, true, `{}`)
 	})
-	t.Run("dropequal-hit", func(t *testing.T) {
+	t.Run("dropequal-hit", func(_ *testing.T) {
 		f(`
 - action: dropequal
   source_labels: [foo]
   target_label: bar
 `, `{foo="a",bar="a"}`, true, `{}`)
 	})
-	t.Run("dropequal-miss", func(t *testing.T) {
+	t.Run("dropequal-miss", func(_ *testing.T) {
 		f(`
 - action: dropequal
   source_labels: [foo]
   target_label: bar
 `, `{foo="a",bar="x"}`, true, `{bar="x",foo="a"}`)
 	})
-	t.Run("keep-miss", func(t *testing.T) {
+	t.Run("keep-miss", func(_ *testing.T) {
 		f(`
 - action: keep
   source_labels: [foo]
@@ -456,33 +456,33 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: ".+"
 `, `{xxx="yyy"}`, true, `{}`)
 	})
-	t.Run("keep-if-miss", func(t *testing.T) {
+	t.Run("keep-if-miss", func(_ *testing.T) {
 		f(`
 - action: keep
   if: '{foo="bar"}'
 `, `{foo="yyy"}`, false, `{}`)
 	})
-	t.Run("keep-if-hit", func(t *testing.T) {
+	t.Run("keep-if-hit", func(_ *testing.T) {
 		f(`
 - action: keep
   if: ['foobar', '{foo="yyy"}', '{a="b"}']
 `, `{foo="yyy"}`, false, `{foo="yyy"}`)
 	})
-	t.Run("keep-hit", func(t *testing.T) {
+	t.Run("keep-hit", func(_ *testing.T) {
 		f(`
 - action: keep
   source_labels: [foo]
   regex: "yyy"
 `, `{foo="yyy"}`, false, `{foo="yyy"}`)
 	})
-	t.Run("keep-hit-regexp", func(t *testing.T) {
+	t.Run("keep-hit-regexp", func(_ *testing.T) {
 		f(`
 - action: keep
   source_labels: ["foo"]
   regex: ".+"
 `, `{foo="yyy"}`, false, `{foo="yyy"}`)
 	})
-	t.Run("keep_metrics-miss", func(t *testing.T) {
+	t.Run("keep_metrics-miss", func(_ *testing.T) {
 		f(`
 - action: keep_metrics
   regex:
@@ -490,19 +490,19 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   - bar
 `, `xxx`, true, `{}`)
 	})
-	t.Run("keep_metrics-if-miss", func(t *testing.T) {
+	t.Run("keep_metrics-if-miss", func(_ *testing.T) {
 		f(`
 - action: keep_metrics
   if: 'bar'
 `, `foo`, true, `{}`)
 	})
-	t.Run("keep_metrics-if-hit", func(t *testing.T) {
+	t.Run("keep_metrics-if-hit", func(_ *testing.T) {
 		f(`
 - action: keep_metrics
   if: 'foo'
 `, `foo`, true, `foo`)
 	})
-	t.Run("keep_metrics-hit", func(t *testing.T) {
+	t.Run("keep_metrics-hit", func(_ *testing.T) {
 		f(`
 - action: keep_metrics
   regex:
@@ -510,7 +510,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   - bar
 `, `foo`, true, `foo`)
 	})
-	t.Run("drop-miss", func(t *testing.T) {
+	t.Run("drop-miss", func(_ *testing.T) {
 		f(`
 - action: drop
   source_labels: [foo]
@@ -522,33 +522,33 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: ".+"
 `, `{xxx="yyy"}`, true, `{xxx="yyy"}`)
 	})
-	t.Run("drop-if-miss", func(t *testing.T) {
+	t.Run("drop-if-miss", func(_ *testing.T) {
 		f(`
 - action: drop
   if: '{foo="bar"}'
 `, `{foo="yyy"}`, true, `{foo="yyy"}`)
 	})
-	t.Run("drop-if-hit", func(t *testing.T) {
+	t.Run("drop-if-hit", func(_ *testing.T) {
 		f(`
 - action: drop
   if: '{foo="yyy"}'
 `, `{foo="yyy"}`, true, `{}`)
 	})
-	t.Run("drop-hit", func(t *testing.T) {
+	t.Run("drop-hit", func(_ *testing.T) {
 		f(`
 - action: drop
   source_labels: [foo]
   regex: yyy
 `, `{foo="yyy"}`, true, `{}`)
 	})
-	t.Run("drop-hit-regexp", func(t *testing.T) {
+	t.Run("drop-hit-regexp", func(_ *testing.T) {
 		f(`
 - action: drop
   source_labels: [foo]
   regex: ".+"
 `, `{foo="yyy"}`, true, `{}`)
 	})
-	t.Run("drop_metrics-miss", func(t *testing.T) {
+	t.Run("drop_metrics-miss", func(_ *testing.T) {
 		f(`
 - action: drop_metrics
   regex:
@@ -556,19 +556,19 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   - bar
 `, `xxx`, true, `xxx`)
 	})
-	t.Run("drop_metrics-if-miss", func(t *testing.T) {
+	t.Run("drop_metrics-if-miss", func(_ *testing.T) {
 		f(`
 - action: drop_metrics
   if: bar
 `, `foo`, true, `foo`)
 	})
-	t.Run("drop_metrics-if-hit", func(t *testing.T) {
+	t.Run("drop_metrics-if-hit", func(_ *testing.T) {
 		f(`
 - action: drop_metrics
   if: foo
 `, `foo`, true, `{}`)
 	})
-	t.Run("drop_metrics-hit", func(t *testing.T) {
+	t.Run("drop_metrics-hit", func(_ *testing.T) {
 		f(`
 - action: drop_metrics
   regex:
@@ -576,7 +576,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   - bar
 `, `foo`, true, `{}`)
 	})
-	t.Run("hashmod-miss", func(t *testing.T) {
+	t.Run("hashmod-miss", func(_ *testing.T) {
 		f(`
 - action: hashmod
   source_labels: [foo]
@@ -584,7 +584,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   modulus: 123
 `, `{xxx="yyy"}`, false, `{aaa="81",xxx="yyy"}`)
 	})
-	t.Run("hashmod-if-miss", func(t *testing.T) {
+	t.Run("hashmod-if-miss", func(_ *testing.T) {
 		f(`
 - action: hashmod
   if: '{foo="bar"}'
@@ -593,7 +593,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   modulus: 123
 `, `{foo="yyy"}`, true, `{foo="yyy"}`)
 	})
-	t.Run("hashmod-if-hit", func(t *testing.T) {
+	t.Run("hashmod-if-hit", func(_ *testing.T) {
 		f(`
 - action: hashmod
   if: '{foo="yyy"}'
@@ -602,7 +602,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   modulus: 123
 `, `{foo="yyy"}`, true, `{aaa="73",foo="yyy"}`)
 	})
-	t.Run("hashmod-hit", func(t *testing.T) {
+	t.Run("hashmod-hit", func(_ *testing.T) {
 		f(`
 - action: hashmod
   source_labels: [foo]
@@ -610,7 +610,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   modulus: 123
 `, `{foo="yyy"}`, true, `{aaa="73",foo="yyy"}`)
 	})
-	t.Run("labelmap-copy-label-if-miss", func(t *testing.T) {
+	t.Run("labelmap-copy-label-if-miss", func(_ *testing.T) {
 		f(`
 - action: labelmap
   if: '{foo="yyy",foobar="aab"}'
@@ -618,7 +618,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "bar"
 `, `{foo="yyy",foobar="aaa"}`, true, `{foo="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap-copy-label-if-hit", func(t *testing.T) {
+	t.Run("labelmap-copy-label-if-hit", func(_ *testing.T) {
 		f(`
 - action: labelmap
   if: '{foo="yyy",foobar="aaa"}'
@@ -626,33 +626,33 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "bar"
 `, `{foo="yyy",foobar="aaa"}`, true, `{bar="yyy",foo="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap-copy-label", func(t *testing.T) {
+	t.Run("labelmap-copy-label", func(_ *testing.T) {
 		f(`
 - action: labelmap
   regex: "foo"
   replacement: "bar"
 `, `{foo="yyy",foobar="aaa"}`, true, `{bar="yyy",foo="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap-remove-prefix-dot-star", func(t *testing.T) {
+	t.Run("labelmap-remove-prefix-dot-star", func(_ *testing.T) {
 		f(`
 - action: labelmap
   regex: "foo(.*)"
 `, `{xoo="yyy",foobar="aaa"}`, true, `{bar="aaa",foobar="aaa",xoo="yyy"}`)
 	})
-	t.Run("labelmap-remove-prefix-dot-plus", func(t *testing.T) {
+	t.Run("labelmap-remove-prefix-dot-plus", func(_ *testing.T) {
 		f(`
 - action: labelmap
   regex: "foo(.+)"
 `, `{foo="yyy",foobar="aaa"}`, true, `{bar="aaa",foo="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap-regex", func(t *testing.T) {
+	t.Run("labelmap-regex", func(_ *testing.T) {
 		f(`
 - action: labelmap
   regex: "foo(.+)"
   replacement: "$1-x"
 `, `{foo="yyy",foobar="aaa"}`, true, `{bar-x="aaa",foo="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap_all-if-miss", func(t *testing.T) {
+	t.Run("labelmap_all-if-miss", func(_ *testing.T) {
 		f(`
 - action: labelmap_all
   if: foobar
@@ -660,7 +660,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "-"
 `, `{foo.bar.baz="yyy",foobar="aaa"}`, true, `{foo.bar.baz="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap_all-if-hit", func(t *testing.T) {
+	t.Run("labelmap_all-if-hit", func(_ *testing.T) {
 		f(`
 - action: labelmap_all
   if: '{foo.bar.baz="yyy"}'
@@ -668,21 +668,21 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   replacement: "-"
 `, `{foo.bar.baz="yyy",foobar="aaa"}`, true, `{foo-bar-baz="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap_all", func(t *testing.T) {
+	t.Run("labelmap_all", func(_ *testing.T) {
 		f(`
 - action: labelmap_all
   regex: "\\."
   replacement: "-"
 `, `{foo.bar.baz="yyy",foobar="aaa"}`, true, `{foo-bar-baz="yyy",foobar="aaa"}`)
 	})
-	t.Run("labelmap_all-regexp", func(t *testing.T) {
+	t.Run("labelmap_all-regexp", func(_ *testing.T) {
 		f(`
 - action: labelmap_all
   regex: "ba(.)"
   replacement: "${1}ss"
 `, `{foo.bar.baz="yyy",foozar="aaa"}`, true, `{foo.rss.zss="yyy",foozar="aaa"}`)
 	})
-	t.Run("labeldrop", func(t *testing.T) {
+	t.Run("labeldrop", func(_ *testing.T) {
 		f(`
 - action: labeldrop
   regex: dropme
@@ -714,7 +714,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: "dropme"
 `, `{xxx="yyy",dropme="aaa"}`, false, `{xxx="yyy"}`)
 	})
-	t.Run("labeldrop-prefix", func(t *testing.T) {
+	t.Run("labeldrop-prefix", func(_ *testing.T) {
 		f(`
 - action: labeldrop
   regex: "dropme.*"
@@ -724,7 +724,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: "dropme(.+)"
 `, `{xxx="yyy",dropme-please="aaa",foo="bar"}`, false, `{foo="bar",xxx="yyy"}`)
 	})
-	t.Run("labeldrop-regexp", func(t *testing.T) {
+	t.Run("labeldrop-regexp", func(_ *testing.T) {
 		f(`
 - action: labeldrop
   regex: ".*dropme.*"
@@ -734,7 +734,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: ".*dropme.*"
 `, `{xxx="yyy",dropme-please="aaa",foo="bar"}`, false, `{foo="bar",xxx="yyy"}`)
 	})
-	t.Run("labelkeep", func(t *testing.T) {
+	t.Run("labelkeep", func(_ *testing.T) {
 		f(`
 - action: labelkeep
   regex: "keepme"
@@ -756,7 +756,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: keepme
 `, `{keepme="aaa",aaaa="awef",keepme-aaa="234"}`, false, `{keepme="aaa"}`)
 	})
-	t.Run("labelkeep-regexp", func(t *testing.T) {
+	t.Run("labelkeep-regexp", func(_ *testing.T) {
 		f(`
 - action: labelkeep
   regex: "keepme.*"
@@ -766,7 +766,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   regex: "keepme.*"
 `, `{keepme="aaa",aaaa="awef",keepme-aaa="234"}`, false, `{keepme="aaa",keepme-aaa="234"}`)
 	})
-	t.Run("upper-lower-case", func(t *testing.T) {
+	t.Run("upper-lower-case", func(_ *testing.T) {
 		f(`
 - action: uppercase
   source_labels: ["foo"]
@@ -788,7 +788,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   target_label: baz
 `, `{qux="quux"}`, true, `{qux="quux"}`)
 	})
-	t.Run("graphite-match", func(t *testing.T) {
+	t.Run("graphite-match", func(_ *testing.T) {
 		f(`
 - action: graphite
   match: foo.*.baz
@@ -797,7 +797,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
     job: ${1}-zz
 `, `foo.bar.baz`, true, `aaa{job="bar-zz"}`)
 	})
-	t.Run("graphite-mismatch", func(t *testing.T) {
+	t.Run("graphite-mismatch", func(_ *testing.T) {
 		f(`
 - action: graphite
   match: foo.*.baz
@@ -806,7 +806,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
     job: ${1}-zz
 `, `foo.bar.bazz`, true, `foo.bar.bazz`)
 	})
-	t.Run("replacement-with-label-refs", func(t *testing.T) {
+	t.Run("replacement-with-label-refs", func(_ *testing.T) {
 		// no regex
 		f(`
 - target_label: abc
@@ -821,7 +821,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
 `, `qwe{foo="bar",baz="aaa"}`, true, `qwe{abc="qwe.bar.aa",baz="aaa",foo="bar"}`)
 	})
 	// Check $ at the end of regex - see https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3131
-	t.Run("replacement-with-$-at-the-end-of-regex", func(t *testing.T) {
+	t.Run("replacement-with-$-at-the-end-of-regex", func(_ *testing.T) {
 		f(`
 - target_label: xyz
   regex: "foo\\$$"
@@ -829,7 +829,7 @@ func TestParsedRelabelConfigsApply(t *testing.T) {
   source_labels: [xyz]
 `, `metric{xyz="foo$",a="b"}`, true, `metric{a="b",xyz="bar"}`)
 	})
-	t.Run("issue-3251", func(t *testing.T) {
+	t.Run("issue-3251", func(_ *testing.T) {
 		f(`
 - source_labels: [instance, container_label_com_docker_swarm_task_name]
   separator: ';'
@@ -966,7 +966,7 @@ func TestParsedRelabelConfigsApplyForMultipleSeries(t *testing.T) {
 		}
 	}
 
-	t.Run("drops one of series", func(t *testing.T) {
+	t.Run("drops one of series", func(_ *testing.T) {
 		f(`
 - action: drop
   if: '{__name__!~"smth"}' 
diff --git a/lib/promscrape/client.go b/lib/promscrape/client.go
index 7ec8c34e68..e6afaa34a9 100644
--- a/lib/promscrape/client.go
+++ b/lib/promscrape/client.go
@@ -92,8 +92,8 @@ func newClient(ctx context.Context, sw *ScrapeWork) *client {
 	if isTLS {
 		tlsCfg = sw.AuthConfig.NewTLSConfig()
 	}
-	setProxyHeaders := func(req *http.Request) {}
-	setFasthttpProxyHeaders := func(req *fasthttp.Request) {}
+	setProxyHeaders := func(_ *http.Request) {}
+	setFasthttpProxyHeaders := func(_ *fasthttp.Request) {}
 	proxyURL := sw.ProxyURL
 	if !isTLS && proxyURL.IsHTTPOrHTTPS() {
 		// Send full sw.ScrapeURL in requests to a proxy host for non-TLS scrape targets
@@ -155,7 +155,7 @@ func newClient(ctx context.Context, sw *ScrapeWork) *client {
 		Timeout: sw.ScrapeTimeout,
 	}
 	if sw.DenyRedirects {
-		sc.CheckRedirect = func(req *http.Request, via []*http.Request) error {
+		sc.CheckRedirect = func(_ *http.Request, _ []*http.Request) error {
 			return http.ErrUseLastResponse
 		}
 	}
diff --git a/lib/promscrape/discoveryutils/client.go b/lib/promscrape/discoveryutils/client.go
index 161c207af1..5df9a95f43 100644
--- a/lib/promscrape/discoveryutils/client.go
+++ b/lib/promscrape/discoveryutils/client.go
@@ -140,20 +140,20 @@ func NewClient(apiServer string, ac *promauth.Config, proxyURL *proxy.URL, proxy
 		},
 	}
 
-	setHTTPHeaders := func(req *http.Request) {}
+	setHTTPHeaders := func(_ *http.Request) {}
 	if ac != nil {
 		setHTTPHeaders = func(req *http.Request) {
 			ac.SetHeaders(req, true)
 		}
 	}
 	if httpCfg.FollowRedirects != nil && !*httpCfg.FollowRedirects {
-		checkRedirect := func(req *http.Request, via []*http.Request) error {
+		checkRedirect := func(_ *http.Request, _ []*http.Request) error {
 			return http.ErrUseLastResponse
 		}
 		client.CheckRedirect = checkRedirect
 		blockingClient.CheckRedirect = checkRedirect
 	}
-	setHTTPProxyHeaders := func(req *http.Request) {}
+	setHTTPProxyHeaders := func(_ *http.Request) {}
 	if proxyAC != nil {
 		setHTTPProxyHeaders = func(req *http.Request) {
 			proxyURL.SetHeaders(proxyAC, req)
diff --git a/lib/promscrape/scraper.go b/lib/promscrape/scraper.go
index f6bf3dd8d8..c6b2121eee 100644
--- a/lib/promscrape/scraper.go
+++ b/lib/promscrape/scraper.go
@@ -140,7 +140,7 @@ func runScraper(configFile string, pushData func(at *auth.Token, wr *prompbmarsh
 	scs.add("nomad_sd_configs", *nomad.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getNomadSDScrapeWork(swsPrev) })
 	scs.add("openstack_sd_configs", *openstack.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getOpenStackSDScrapeWork(swsPrev) })
 	scs.add("yandexcloud_sd_configs", *yandexcloud.SDCheckInterval, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getYandexCloudSDScrapeWork(swsPrev) })
-	scs.add("static_configs", 0, func(cfg *Config, swsPrev []*ScrapeWork) []*ScrapeWork { return cfg.getStaticScrapeWork() })
+	scs.add("static_configs", 0, func(cfg *Config, _ []*ScrapeWork) []*ScrapeWork { return cfg.getStaticScrapeWork() })
 
 	var tickerCh <-chan time.Time
 	if *configCheckInterval > 0 {
@@ -410,7 +410,7 @@ func (sg *scraperGroup) update(sws []*ScrapeWork) {
 		sg.scrapersStarted.Inc()
 		sg.wg.Add(1)
 		tsmGlobal.Register(&sc.sw)
-		go func(sw *ScrapeWork) {
+		go func(_ *ScrapeWork) {
 			defer func() {
 				sg.wg.Done()
 				close(sc.stoppedCh)
diff --git a/lib/promscrape/scrapework_test.go b/lib/promscrape/scrapework_test.go
index c454715284..3d5fc95b49 100644
--- a/lib/promscrape/scrapework_test.go
+++ b/lib/promscrape/scrapework_test.go
@@ -96,7 +96,7 @@ func TestScrapeWorkScrapeInternalFailure(t *testing.T) {
 
 	pushDataCalls := 0
 	var pushDataErr error
-	sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) {
+	sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
 		if err := expectEqualTimeseries(wr.Timeseries, timeseriesExpected); err != nil {
 			pushDataErr = fmt.Errorf("unexpected data pushed: %w\ngot\n%#v\nwant\n%#v", err, wr.Timeseries, timeseriesExpected)
 		}
@@ -136,7 +136,7 @@ func TestScrapeWorkScrapeInternalSuccess(t *testing.T) {
 
 		pushDataCalls := 0
 		var pushDataErr error
-		sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) {
+		sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
 			pushDataCalls++
 			if len(wr.Timeseries) > len(timeseriesExpected) {
 				pushDataErr = fmt.Errorf("too many time series obtained; got %d; want %d\ngot\n%+v\nwant\n%+v",
@@ -716,7 +716,7 @@ func TestSendStaleSeries(t *testing.T) {
 		defer common.StopUnmarshalWorkers()
 
 		var staleMarks int
-		sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) {
+		sw.PushData = func(_ *auth.Token, wr *prompbmarshal.WriteRequest) {
 			staleMarks += len(wr.Timeseries)
 		}
 		sw.sendStaleSeries(lastScrape, currScrape, 0, false)
diff --git a/lib/promscrape/scrapework_timing_test.go b/lib/promscrape/scrapework_timing_test.go
index bd83eb6ae1..047e1aa0bd 100644
--- a/lib/promscrape/scrapework_timing_test.go
+++ b/lib/promscrape/scrapework_timing_test.go
@@ -82,7 +82,7 @@ vm_tcplistener_write_calls_total{name="https", addr=":443"} 132356
 		var sw scrapeWork
 		sw.Config = &ScrapeWork{}
 		sw.ReadData = readDataFunc
-		sw.PushData = func(at *auth.Token, wr *prompbmarshal.WriteRequest) {}
+		sw.PushData = func(_ *auth.Token, _ *prompbmarshal.WriteRequest) {}
 		timestamp := int64(0)
 		for pb.Next() {
 			if err := sw.scrapeInternal(timestamp, timestamp); err != nil {
diff --git a/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go b/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go
index 368c0309d3..270e03429b 100644
--- a/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go
+++ b/lib/protoparser/opentelemetry/stream/streamparser_timing_test.go
@@ -27,7 +27,7 @@ func BenchmarkParseStream(b *testing.B) {
 		}
 
 		for p.Next() {
-			err := ParseStream(bytes.NewBuffer(data), false, func(tss []prompbmarshal.TimeSeries) error {
+			err := ParseStream(bytes.NewBuffer(data), false, func(_ []prompbmarshal.TimeSeries) error {
 				return nil
 			})
 			if err != nil {
diff --git a/lib/storage/tag_filters.go b/lib/storage/tag_filters.go
index bc74925901..4b9a3df4fa 100644
--- a/lib/storage/tag_filters.go
+++ b/lib/storage/tag_filters.go
@@ -620,7 +620,7 @@ const (
 func getOptimizedReMatchFuncExt(reMatch func(b []byte) bool, sre *syntax.Regexp) (func(b []byte) bool, string, uint64) {
 	if isDotStar(sre) {
 		// '.*'
-		return func(b []byte) bool {
+		return func(_ []byte) bool {
 			return true
 		}, "", fullMatchCost
 	}
diff --git a/lib/storage/tag_filters_test.go b/lib/storage/tag_filters_test.go
index 730b02f414..1eedeb4542 100644
--- a/lib/storage/tag_filters_test.go
+++ b/lib/storage/tag_filters_test.go
@@ -784,7 +784,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		}
 	}
 
-	t.Run("plain-value", func(t *testing.T) {
+	t.Run("plain-value", func(_ *testing.T) {
 		value := "xx"
 		isNegative := false
 		isRegexp := false
@@ -796,7 +796,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("foo")
 		mismatch("xx")
 	})
-	t.Run("negative-plain-value", func(t *testing.T) {
+	t.Run("negative-plain-value", func(_ *testing.T) {
 		value := "xx"
 		isNegative := true
 		isRegexp := false
@@ -811,7 +811,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		match("xxx")
 		match("xxfoo")
 	})
-	t.Run("regexp-convert-to-plain-value", func(t *testing.T) {
+	t.Run("regexp-convert-to-plain-value", func(_ *testing.T) {
 		value := "http"
 		isNegative := false
 		isRegexp := true
@@ -824,7 +824,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("http")
 		mismatch("foobar")
 	})
-	t.Run("negative-regexp-convert-to-plain-value", func(t *testing.T) {
+	t.Run("negative-regexp-convert-to-plain-value", func(_ *testing.T) {
 		value := "http"
 		isNegative := true
 		isRegexp := true
@@ -839,7 +839,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		match("httpx")
 		match("foobar")
 	})
-	t.Run("regexp-prefix-any-suffix", func(t *testing.T) {
+	t.Run("regexp-prefix-any-suffix", func(_ *testing.T) {
 		value := "http.*"
 		isNegative := false
 		isRegexp := true
@@ -852,7 +852,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		match("http")
 		match("foobar")
 	})
-	t.Run("negative-regexp-prefix-any-suffix", func(t *testing.T) {
+	t.Run("negative-regexp-prefix-any-suffix", func(_ *testing.T) {
 		value := "http.*"
 		isNegative := true
 		isRegexp := true
@@ -867,7 +867,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("httpsdf")
 		mismatch("foobar")
 	})
-	t.Run("regexp-prefix-contains-suffix", func(t *testing.T) {
+	t.Run("regexp-prefix-contains-suffix", func(_ *testing.T) {
 		value := "http.*foo.*"
 		isNegative := false
 		isRegexp := true
@@ -883,7 +883,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		match("xfoobar")
 		match("xfoo")
 	})
-	t.Run("negative-regexp-prefix-contains-suffix", func(t *testing.T) {
+	t.Run("negative-regexp-prefix-contains-suffix", func(_ *testing.T) {
 		value := "http.*foo.*"
 		isNegative := true
 		isRegexp := true
@@ -903,7 +903,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("httpxfoobar")
 		mismatch("httpxfoo")
 	})
-	t.Run("negative-regexp-noprefix-contains-suffix", func(t *testing.T) {
+	t.Run("negative-regexp-noprefix-contains-suffix", func(_ *testing.T) {
 		value := ".*foo.*"
 		isNegative := true
 		isRegexp := true
@@ -919,7 +919,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("xfoobar")
 		mismatch("xfoo")
 	})
-	t.Run("regexp-prefix-special-suffix", func(t *testing.T) {
+	t.Run("regexp-prefix-special-suffix", func(_ *testing.T) {
 		value := "http.*bar"
 		isNegative := false
 		isRegexp := true
@@ -934,7 +934,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		match("foobar")
 		mismatch("foobarx")
 	})
-	t.Run("negative-regexp-prefix-special-suffix", func(t *testing.T) {
+	t.Run("negative-regexp-prefix-special-suffix", func(_ *testing.T) {
 		value := "http.*bar"
 		isNegative := true
 		isRegexp := true
@@ -951,7 +951,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		match("httpxybarx")
 		mismatch("ahttpxybar")
 	})
-	t.Run("negative-regexp-noprefix-special-suffix", func(t *testing.T) {
+	t.Run("negative-regexp-noprefix-special-suffix", func(_ *testing.T) {
 		value := ".*bar"
 		isNegative := true
 		isRegexp := true
@@ -1002,7 +1002,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("bar")
 		match("xhttpbar")
 	})
-	t.Run("regexp-iflag-no-suffix", func(t *testing.T) {
+	t.Run("regexp-iflag-no-suffix", func(_ *testing.T) {
 		value := "(?i)http"
 		isNegative := false
 		isRegexp := true
@@ -1020,7 +1020,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("xhttp://")
 		mismatch("hTTp://foobar.com")
 	})
-	t.Run("negative-regexp-iflag-no-suffix", func(t *testing.T) {
+	t.Run("negative-regexp-iflag-no-suffix", func(_ *testing.T) {
 		value := "(?i)http"
 		isNegative := true
 		isRegexp := true
@@ -1038,7 +1038,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		match("xhttp://")
 		match("hTTp://foobar.com")
 	})
-	t.Run("regexp-iflag-any-suffix", func(t *testing.T) {
+	t.Run("regexp-iflag-any-suffix", func(_ *testing.T) {
 		value := "(?i)http.*"
 		isNegative := false
 		isRegexp := true
@@ -1055,7 +1055,7 @@ func TestTagFilterMatchSuffix(t *testing.T) {
 		mismatch("xhttp")
 		mismatch("xhttp://")
 	})
-	t.Run("negative-regexp-iflag-any-suffix", func(t *testing.T) {
+	t.Run("negative-regexp-iflag-any-suffix", func(_ *testing.T) {
 		value := "(?i)http.*"
 		isNegative := true
 		isRegexp := true
diff --git a/lib/storage/tag_filters_timing_test.go b/lib/storage/tag_filters_timing_test.go
index 3789560fef..f9db7facc6 100644
--- a/lib/storage/tag_filters_timing_test.go
+++ b/lib/storage/tag_filters_timing_test.go
@@ -411,7 +411,7 @@ func BenchmarkOptimizedReMatchCost(b *testing.B) {
 		})
 	})
 	b.Run(".*", func(b *testing.B) {
-		reMatch := func(b []byte) bool {
+		reMatch := func(_ []byte) bool {
 			return true
 		}
 		suffix := []byte("foo1.bar.baz.sss.ddd")
diff --git a/lib/streamaggr/streamaggr_test.go b/lib/streamaggr/streamaggr_test.go
index 33924c8e21..f1a4d30106 100644
--- a/lib/streamaggr/streamaggr_test.go
+++ b/lib/streamaggr/streamaggr_test.go
@@ -17,7 +17,7 @@ import (
 func TestAggregatorsFailure(t *testing.T) {
 	f := func(config string) {
 		t.Helper()
-		pushFunc := func(tss []prompbmarshal.TimeSeries) {
+		pushFunc := func(_ []prompbmarshal.TimeSeries) {
 			panic(fmt.Errorf("pushFunc shouldn't be called"))
 		}
 		a, err := NewAggregatorsFromData([]byte(config), pushFunc, 0)
@@ -123,7 +123,7 @@ func TestAggregatorsEqual(t *testing.T) {
 	f := func(a, b string, expectedResult bool) {
 		t.Helper()
 
-		pushFunc := func(tss []prompbmarshal.TimeSeries) {}
+		pushFunc := func(_ []prompbmarshal.TimeSeries) {}
 		aa, err := NewAggregatorsFromData([]byte(a), pushFunc, 0)
 		if err != nil {
 			t.Fatalf("cannot initialize aggregators: %s", err)
diff --git a/lib/streamaggr/streamaggr_timing_test.go b/lib/streamaggr/streamaggr_timing_test.go
index 51f95455b0..638b775470 100644
--- a/lib/streamaggr/streamaggr_timing_test.go
+++ b/lib/streamaggr/streamaggr_timing_test.go
@@ -38,7 +38,7 @@ func benchmarkAggregatorsPush(b *testing.B, output string) {
   outputs: [%q]
 `, output)
 	pushCalls := 0
-	pushFunc := func(tss []prompbmarshal.TimeSeries) {
+	pushFunc := func(_ []prompbmarshal.TimeSeries) {
 		pushCalls++
 		if pushCalls > 1 {
 			panic(fmt.Errorf("pushFunc is expected to be called exactly once at MustStop"))
diff --git a/lib/uint64set/uint64set_test.go b/lib/uint64set/uint64set_test.go
index f6c7ed2794..d835382b94 100644
--- a/lib/uint64set/uint64set_test.go
+++ b/lib/uint64set/uint64set_test.go
@@ -403,7 +403,7 @@ func testSetBasicOps(t *testing.T, itemsCount int) {
 
 		// Verify fast stop
 		calls := 0
-		s.ForEach(func(part []uint64) bool {
+		s.ForEach(func(_ []uint64) bool {
 			calls++
 			return false
 		})
@@ -413,7 +413,7 @@ func testSetBasicOps(t *testing.T, itemsCount int) {
 
 		// Verify ForEach on nil set.
 		var s1 *Set
-		s1.ForEach(func(part []uint64) bool {
+		s1.ForEach(func(_ []uint64) bool {
 			t.Fatalf("callback shouldn't be called on empty set")
 			return true
 		})