all: consistently use %w instead of %s when an error is passed to fmt.Errorf()

This allows consistently using errors.Is() for verifying whether the given error wraps some other known error.
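For illustration, a minimal sketch of the difference (the message text and the use of os.ErrNotExist are made up for this example, not taken from the commit; %w requires Go 1.13 or newer):

package main

import (
	"errors"
	"fmt"
	"os"
)

func main() {
	// %w records the wrapped error, so errors.Is can find it later.
	wrapped := fmt.Errorf("cannot open config: %w", os.ErrNotExist)
	fmt.Println(errors.Is(wrapped, os.ErrNotExist)) // true

	// %s only interpolates the error text; the error chain is lost.
	flattened := fmt.Errorf("cannot open config: %s", os.ErrNotExist)
	fmt.Println(errors.Is(flattened, os.ErrNotExist)) // false
}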
Aliaksandr Valialkin 2023-10-25 21:24:01 +02:00
parent 305c96e384
commit 42dd71bb63
42 changed files with 83 additions and 83 deletions


@@ -120,10 +120,10 @@ func compressData(s string) string {
 	var bb bytes.Buffer
 	zw := gzip.NewWriter(&bb)
 	if _, err := zw.Write([]byte(s)); err != nil {
-		panic(fmt.Errorf("unexpected error when compressing data: %s", err))
+		panic(fmt.Errorf("unexpected error when compressing data: %w", err))
 	}
 	if err := zw.Close(); err != nil {
-		panic(fmt.Errorf("unexpected error when closing gzip writer: %s", err))
+		panic(fmt.Errorf("unexpected error when closing gzip writer: %w", err))
 	}
 	return bb.String()
 }


@@ -43,7 +43,7 @@ func benchmarkReadBulkRequest(b *testing.B, isGzip bool) {
 			r.Reset(dataBytes)
 			_, err := readBulkRequest(r, isGzip, timeField, msgField, processLogMessage)
 			if err != nil {
-				panic(fmt.Errorf("unexpected error: %s", err))
+				panic(fmt.Errorf("unexpected error: %w", err))
 			}
 		}
 	})


@@ -29,7 +29,7 @@ func benchmarkParseJSONRequest(b *testing.B, streams, rows, labels int) {
 		for pb.Next() {
 			_, err := parseJSONRequest(data, func(timestamp int64, fields []logstorage.Field) {})
 			if err != nil {
-				panic(fmt.Errorf("unexpected error: %s", err))
+				panic(fmt.Errorf("unexpected error: %w", err))
 			}
 		}
 	})


@@ -84,7 +84,7 @@ func parseProtobufRequest(data []byte, processLogMessage func(timestamp int64, f
 	err = req.Unmarshal(bb.B)
 	if err != nil {
-		return 0, fmt.Errorf("cannot parse request body: %s", err)
+		return 0, fmt.Errorf("cannot parse request body: %w", err)
 	}
 	var commonFields []logstorage.Field
@@ -97,7 +97,7 @@ func parseProtobufRequest(data []byte, processLogMessage func(timestamp int64, f
 		// Labels are same for all entries in the stream.
 		commonFields, err = parsePromLabels(commonFields[:0], stream.Labels)
 		if err != nil {
-			return rowsIngested, fmt.Errorf("cannot parse stream labels %q: %s", stream.Labels, err)
+			return rowsIngested, fmt.Errorf("cannot parse stream labels %q: %w", stream.Labels, err)
 		}
 		fields := commonFields


@@ -31,7 +31,7 @@ func benchmarkParseProtobufRequest(b *testing.B, streams, rows, labels int) {
 		for pb.Next() {
 			_, err := parseProtobufRequest(body, func(timestamp int64, fields []logstorage.Field) {})
 			if err != nil {
-				panic(fmt.Errorf("unexpected error: %s", err))
+				panic(fmt.Errorf("unexpected error: %w", err))
 			}
 		}
 	})


@@ -236,7 +236,7 @@ func ParseSilent(pathPatterns []string, validateTplFn ValidateTplFn, validateExp
 	files, err := readFromFS(pathPatterns)
 	if err != nil {
-		return nil, fmt.Errorf("failed to read from the config: %s", err)
+		return nil, fmt.Errorf("failed to read from the config: %w", err)
 	}
 	return parse(files, validateTplFn, validateExpressions)
 }
@@ -245,11 +245,11 @@ func ParseSilent(pathPatterns []string, validateTplFn ValidateTplFn, validateExp
 func Parse(pathPatterns []string, validateTplFn ValidateTplFn, validateExpressions bool) ([]Group, error) {
 	files, err := readFromFS(pathPatterns)
 	if err != nil {
-		return nil, fmt.Errorf("failed to read from the config: %s", err)
+		return nil, fmt.Errorf("failed to read from the config: %w", err)
 	}
 	groups, err := parse(files, validateTplFn, validateExpressions)
 	if err != nil {
-		return nil, fmt.Errorf("failed to parse %s: %s", pathPatterns, err)
+		return nil, fmt.Errorf("failed to parse %s: %w", pathPatterns, err)
 	}
 	if len(groups) < 1 {
 		cLogger.Warnf("no groups found in %s", strings.Join(pathPatterns, ";"))


@@ -49,7 +49,7 @@ func (fs *FS) Read(files []string) (map[string][]byte, error) {
 				path, resp.StatusCode, http.StatusOK, data)
 		}
 		if err != nil {
-			return nil, fmt.Errorf("cannot read %q: %s", path, err)
+			return nil, fmt.Errorf("cannot read %q: %w", path, err)
 		}
 		result[path] = data
 	}


@@ -117,7 +117,7 @@ func Init(extraParams url.Values) (QuerierBuilder, error) {
 	}
 	_, err = authCfg.GetAuthHeader()
 	if err != nil {
-		return nil, fmt.Errorf("failed to set request auth header to datasource %q: %s", *addr, err)
+		return nil, fmt.Errorf("failed to set request auth header to datasource %q: %w", *addr, err)
 	}
 	return &VMStorage{


@@ -219,7 +219,7 @@ func (s *VMStorage) do(ctx context.Context, req *http.Request) (*http.Response,
 func (s *VMStorage) newQueryRangeRequest(query string, start, end time.Time) (*http.Request, error) {
 	req, err := s.newRequest()
 	if err != nil {
-		return nil, fmt.Errorf("cannot create query_range request to datasource %q: %s", s.datasourceURL, err)
+		return nil, fmt.Errorf("cannot create query_range request to datasource %q: %w", s.datasourceURL, err)
 	}
 	s.setPrometheusRangeReqParams(req, query, start, end)
 	return req, nil
@@ -228,7 +228,7 @@ func (s *VMStorage) newQueryRangeRequest(query string, start, end time.Time) (*h
 func (s *VMStorage) newQueryRequest(query string, ts time.Time) (*http.Request, error) {
 	req, err := s.newRequest()
 	if err != nil {
-		return nil, fmt.Errorf("cannot create query request to datasource %q: %s", s.datasourceURL, err)
+		return nil, fmt.Errorf("cannot create query request to datasource %q: %w", s.datasourceURL, err)
 	}
 	switch s.dataSourceType {
 	case "", datasourcePrometheus:


@@ -112,14 +112,14 @@ func parsePrometheusResponse(req *http.Request, resp *http.Response) (res Result
 		return res, fmt.Errorf("response error, query: %s, errorType: %s, error: %s", req.URL.Redacted(), r.ErrorType, r.Error)
 	}
 	if r.Status != statusSuccess {
-		return res, fmt.Errorf("unknown status: %s, Expected success or error ", r.Status)
+		return res, fmt.Errorf("unknown status: %s, Expected success or error", r.Status)
 	}
 	var parseFn func() ([]Metric, error)
 	switch r.Data.ResultType {
 	case rtVector:
 		var pi promInstant
 		if err := json.Unmarshal(r.Data.Result, &pi.Result); err != nil {
-			return res, fmt.Errorf("umarshal err %s; \n %#v", err, string(r.Data.Result))
+			return res, fmt.Errorf("unmarshal err %w; \n %#v", err, string(r.Data.Result))
 		}
 		parseFn = pi.metrics
 	case rtMatrix:


@@ -87,7 +87,7 @@ func (cw *configWatcher) reload(path string) error {
 func (cw *configWatcher) add(typeK TargetType, interval time.Duration, labelsFn getLabels) error {
 	targets, errors := targetsFromLabels(labelsFn, cw.cfg, cw.genFn)
 	for _, err := range errors {
-		return fmt.Errorf("failed to init notifier for %q: %s", typeK, err)
+		return fmt.Errorf("failed to init notifier for %q: %w", typeK, err)
 	}
 	cw.setTargets(typeK, targets)
@@ -107,7 +107,7 @@ func (cw *configWatcher) add(typeK TargetType, interval time.Duration, labelsFn
 			}
 			updateTargets, errors := targetsFromLabels(labelsFn, cw.cfg, cw.genFn)
 			for _, err := range errors {
-				logger.Errorf("failed to init notifier for %q: %s", typeK, err)
+				logger.Errorf("failed to init notifier for %q: %w", typeK, err)
 			}
 			cw.setTargets(typeK, updateTargets)
 		}
@@ -118,7 +118,7 @@ func (cw *configWatcher) add(typeK TargetType, interval time.Duration, labelsFn
 func targetsFromLabels(labelsFn getLabels, cfg *Config, genFn AlertURLGenerator) ([]Target, []error) {
 	metaLabels, err := labelsFn()
 	if err != nil {
-		return nil, []error{fmt.Errorf("failed to get labels: %s", err)}
+		return nil, []error{fmt.Errorf("failed to get labels: %w", err)}
 	}
 	var targets []Target
 	var errors []error
@@ -167,11 +167,11 @@ func (cw *configWatcher) start() error {
 		for _, target := range cfg.Targets {
 			address, labels, err := parseLabels(target, nil, cw.cfg)
 			if err != nil {
-				return fmt.Errorf("failed to parse labels for target %q: %s", target, err)
+				return fmt.Errorf("failed to parse labels for target %q: %w", target, err)
 			}
 			notifier, err := NewAlertManager(address, cw.genFn, httpCfg, cw.cfg.parsedAlertRelabelConfigs, cw.cfg.Timeout.Duration())
 			if err != nil {
-				return fmt.Errorf("failed to init alertmanager for addr %q: %s", address, err)
+				return fmt.Errorf("failed to init alertmanager for addr %q: %w", address, err)
 			}
 			targets = append(targets, Target{
 				Notifier: notifier,
@@ -189,14 +189,14 @@ func (cw *configWatcher) start() error {
 				sdc := &cw.cfg.ConsulSDConfigs[i]
 				targetLabels, err := sdc.GetLabels(cw.cfg.baseDir)
 				if err != nil {
-					return nil, fmt.Errorf("got labels err: %s", err)
+					return nil, fmt.Errorf("got labels err: %w", err)
 				}
 				labels = append(labels, targetLabels...)
 			}
 			return labels, nil
 		})
 		if err != nil {
-			return fmt.Errorf("failed to start consulSD discovery: %s", err)
+			return fmt.Errorf("failed to start consulSD discovery: %w", err)
 		}
 	}
@@ -207,14 +207,14 @@ func (cw *configWatcher) start() error {
 				sdc := &cw.cfg.DNSSDConfigs[i]
 				targetLabels, err := sdc.GetLabels(cw.cfg.baseDir)
 				if err != nil {
-					return nil, fmt.Errorf("got labels err: %s", err)
+					return nil, fmt.Errorf("got labels err: %w", err)
 				}
 				labels = append(labels, targetLabels...)
 			}
 			return labels, nil
 		})
 		if err != nil {
-			return fmt.Errorf("failed to start DNSSD discovery: %s", err)
+			return fmt.Errorf("failed to start DNSSD discovery: %w", err)
 		}
 	}
 	return nil


@@ -90,7 +90,7 @@ func Init(gen AlertURLGenerator, extLabels map[string]string, extURL string) (fu
 	externalLabels = extLabels
 	eu, err := url.Parse(externalURL)
 	if err != nil {
-		return nil, fmt.Errorf("failed to parse external URL: %s", err)
+		return nil, fmt.Errorf("failed to parse external URL: %w", err)
 	}
 	templates.UpdateWithFuncs(templates.FuncsWithExternalURL(eu))
@@ -116,7 +116,7 @@ func Init(gen AlertURLGenerator, extLabels map[string]string, extURL string) (fu
 	if len(*addrs) > 0 {
 		notifiers, err := notifiersFromFlags(gen)
 		if err != nil {
-			return nil, fmt.Errorf("failed to create notifier from flag values: %s", err)
+			return nil, fmt.Errorf("failed to create notifier from flag values: %w", err)
 		}
 		staticNotifiersFn = func() []Notifier {
 			return notifiers
@@ -126,7 +126,7 @@ func Init(gen AlertURLGenerator, extLabels map[string]string, extURL string) (fu
 	cw, err = newWatcher(*configPath, gen)
 	if err != nil {
-		return nil, fmt.Errorf("failed to init config watcher: %s", err)
+		return nil, fmt.Errorf("failed to init config watcher: %w", err)
 	}
 	return cw.notifiers, nil
 }


@@ -36,11 +36,11 @@ func replay(groupsCfg []config.Group, qb datasource.QuerierBuilder, rw remotewri
 	}
 	tFrom, err := time.Parse(time.RFC3339, *replayFrom)
 	if err != nil {
-		return fmt.Errorf("failed to parse %q: %s", *replayFrom, err)
+		return fmt.Errorf("failed to parse %q: %w", *replayFrom, err)
 	}
 	tTo, err := time.Parse(time.RFC3339, *replayTo)
 	if err != nil {
-		return fmt.Errorf("failed to parse %q: %s", *replayTo, err)
+		return fmt.Errorf("failed to parse %q: %w", *replayTo, err)
 	}
 	if !tTo.After(tFrom) {
 		return fmt.Errorf("replay.timeTo must be bigger than replay.timeFrom")


@@ -269,7 +269,7 @@ func (ar *AlertingRule) toLabels(m datasource.Metric, qFn templates.QueryFn) (*l
 		Expr: ar.Expr,
 	})
 	if err != nil {
-		return nil, fmt.Errorf("failed to expand labels: %s", err)
+		return nil, fmt.Errorf("failed to expand labels: %w", err)
 	}
 	for k, v := range extraLabels {
 		ls.processed[k] = v
@@ -310,7 +310,7 @@ func (ar *AlertingRule) execRange(ctx context.Context, start, end time.Time) ([]
 	for _, s := range res.Data {
 		a, err := ar.newAlert(s, nil, time.Time{}, qFn) // initial alert
 		if err != nil {
-			return nil, fmt.Errorf("failed to create alert: %s", err)
+			return nil, fmt.Errorf("failed to create alert: %w", err)
 		}
 		if ar.For == 0 { // if alert is instant
 			a.State = notifier.StateFiring
@@ -388,7 +388,7 @@ func (ar *AlertingRule) exec(ctx context.Context, ts time.Time, limit int) ([]pr
 	for _, m := range res.Data {
 		ls, err := ar.toLabels(m, qFn)
 		if err != nil {
-			curState.Err = fmt.Errorf("failed to expand labels: %s", err)
+			curState.Err = fmt.Errorf("failed to expand labels: %w", err)
 			return nil, curState.Err
 		}
 		h := hash(ls.processed)
@@ -513,7 +513,7 @@ func (ar *AlertingRule) newAlert(m datasource.Metric, ls *labelSet, start time.T
 	if ls == nil {
 		ls, err = ar.toLabels(m, qFn)
 		if err != nil {
-			return nil, fmt.Errorf("failed to expand labels: %s", err)
+			return nil, fmt.Errorf("failed to expand labels: %w", err)
 		}
 	}
 	a := &notifier.Alert{


@@ -166,7 +166,7 @@ func replayRule(r Rule, start, end time.Time, rw remotewrite.RWClient, replayRul
 	var n int
 	for _, ts := range tss {
 		if err := rw.Push(ts); err != nil {
-			return n, fmt.Errorf("remote write failure: %s", err)
+			return n, fmt.Errorf("remote write failure: %w", err)
 		}
 		n += len(ts.Samples)
 	}


@@ -147,11 +147,11 @@ func (rh *requestHandler) handler(w http.ResponseWriter, r *http.Request) bool {
 func (rh *requestHandler) getRule(r *http.Request) (apiRule, error) {
 	groupID, err := strconv.ParseUint(r.FormValue(paramGroupID), 10, 64)
 	if err != nil {
-		return apiRule{}, fmt.Errorf("failed to read %q param: %s", paramGroupID, err)
+		return apiRule{}, fmt.Errorf("failed to read %q param: %w", paramGroupID, err)
 	}
 	ruleID, err := strconv.ParseUint(r.FormValue(paramRuleID), 10, 64)
 	if err != nil {
-		return apiRule{}, fmt.Errorf("failed to read %q param: %s", paramRuleID, err)
+		return apiRule{}, fmt.Errorf("failed to read %q param: %w", paramRuleID, err)
 	}
 	obj, err := rh.m.ruleAPI(groupID, ruleID)
 	if err != nil {
@@ -163,11 +163,11 @@ func (rh *requestHandler) getRule(r *http.Request) (apiRule, error) {
 func (rh *requestHandler) getAlert(r *http.Request) (*apiAlert, error) {
 	groupID, err := strconv.ParseUint(r.FormValue(paramGroupID), 10, 64)
 	if err != nil {
-		return nil, fmt.Errorf("failed to read %q param: %s", paramGroupID, err)
+		return nil, fmt.Errorf("failed to read %q param: %w", paramGroupID, err)
 	}
 	alertID, err := strconv.ParseUint(r.FormValue(paramAlertID), 10, 64)
 	if err != nil {
-		return nil, fmt.Errorf("failed to read %q param: %s", paramAlertID, err)
+		return nil, fmt.Errorf("failed to read %q param: %w", paramAlertID, err)
 	}
 	a, err := rh.m.alertAPI(groupID, alertID)
 	if err != nil {


@@ -168,7 +168,7 @@ See the docs at https://docs.victoriametrics.com/vmbackup.html .
 func newSrcFS() (*fslocal.FS, error) {
 	if err := snapshot.Validate(*snapshotName); err != nil {
-		return nil, fmt.Errorf("invalid -snapshotName=%q: %s", *snapshotName, err)
+		return nil, fmt.Errorf("invalid -snapshotName=%q: %w", *snapshotName, err)
 	}
 	snapshotPath := filepath.Join(*storageDataPath, "snapshots", *snapshotName)


@@ -81,7 +81,7 @@ var funcs = func() map[string]*funcInfo {
 	var m map[string]*funcInfo
 	if err := json.Unmarshal(funcsJSON, &m); err != nil {
 		// Do not use logger.Panicf, since it isn't ready yet.
-		panic(fmt.Errorf("cannot parse funcsJSON: %s", err))
+		panic(fmt.Errorf("cannot parse funcsJSON: %w", err))
 	}
 	return m
 }()


@@ -197,7 +197,7 @@ func ExportCSVHandler(startTime time.Time, w http.ResponseWriter, r *http.Reques
 			return err
 		}
 		if err := b.UnmarshalData(); err != nil {
-			return fmt.Errorf("cannot unmarshal block during export: %s", err)
+			return fmt.Errorf("cannot unmarshal block during export: %w", err)
 		}
 		xb := exportBlockPool.Get().(*exportBlock)
 		xb.mn = mn
@@ -407,7 +407,7 @@ func exportHandler(qt *querytracer.Tracer, w http.ResponseWriter, cp *commonPara
 			return err
 		}
 		if err := b.UnmarshalData(); err != nil {
-			return fmt.Errorf("cannot unmarshal block during export: %s", err)
+			return fmt.Errorf("cannot unmarshal block during export: %w", err)
 		}
 		xb := exportBlockPool.Get().(*exportBlock)
 		xb.mn = mn


@@ -418,7 +418,7 @@ func ReadFileOrHTTP(path string) ([]byte, error) {
 		return nil, fmt.Errorf("unexpected status code when fetching %q: %d, expecting %d; response: %q", path, resp.StatusCode, http.StatusOK, data)
 	}
 	if err != nil {
-		return nil, fmt.Errorf("cannot read %q: %s", path, err)
+		return nil, fmt.Errorf("cannot read %q: %w", path, err)
 	}
 	return data, nil
 }


@@ -204,7 +204,7 @@ func gzipHandler(s *server, rh RequestHandler) http.HandlerFunc {
 var gzipHandlerWrapper = func() func(http.Handler) http.HandlerFunc {
 	hw, err := gzhttp.NewWrapper(gzhttp.CompressionLevel(1))
 	if err != nil {
-		panic(fmt.Errorf("BUG: cannot initialize gzip http wrapper: %s", err))
+		panic(fmt.Errorf("BUG: cannot initialize gzip http wrapper: %w", err))
 	}
 	return hw
 }()


@@ -899,7 +899,7 @@ func unmarshalCommonPrefix(dstTenantID *TenantID, src []byte) ([]byte, byte, err
 	src = src[1:]
 	tail, err := dstTenantID.unmarshal(src)
 	if err != nil {
-		return nil, 0, fmt.Errorf("cannot unmarshal tenantID: %s", err)
+		return nil, 0, fmt.Errorf("cannot unmarshal tenantID: %w", err)
 	}
 	return tail, prefix, nil
 }


@@ -310,7 +310,7 @@ func TestStorageRunQuery(t *testing.T) {
 func mustParseQuery(query string) *Query {
 	q, err := ParseQuery(query)
 	if err != nil {
-		panic(fmt.Errorf("BUG: cannot parse %s: %s", query, err))
+		panic(fmt.Errorf("BUG: cannot parse %s: %w", query, err))
 	}
 	return q
 }
@@ -657,7 +657,7 @@ func TestStorageSearch(t *testing.T) {
 func mustNewStreamFilter(s string) *StreamFilter {
 	sf, err := newStreamFilter(s)
 	if err != nil {
-		panic(fmt.Errorf("unexpected error in newStreamFilter(%q): %s", s, err))
+		panic(fmt.Errorf("unexpected error in newStreamFilter(%q): %w", s, err))
 	}
 	return sf
 }


@@ -19,7 +19,7 @@ func newProxyProtocolConn(c net.Conn) (net.Conn, error) {
 	// Limit the time needed for reading the proxy protocol header.
 	d := time.Now().Add(5 * time.Second)
 	if err := c.SetReadDeadline(d); err != nil {
-		return nil, fmt.Errorf("cannot set deadline for reading proxy protocol header: %s", err)
+		return nil, fmt.Errorf("cannot set deadline for reading proxy protocol header: %w", err)
 	}
 	remoteAddr, err := readProxyProto(c)
@@ -32,7 +32,7 @@ func newProxyProtocolConn(c net.Conn) (net.Conn, error) {
 	// Reset the read deadline.
 	if err := c.SetReadDeadline(time.Time{}); err != nil {
-		return nil, fmt.Errorf("cannot reset deadline after reading proxy protocol header: %s", err)
+		return nil, fmt.Errorf("cannot reset deadline after reading proxy protocol header: %w", err)
 	}
 	return &proxyProtocolConn{


@@ -599,7 +599,7 @@ func (actx *authContext) initFromAuthorization(baseDir string, az *Authorization
 	actx.getAuthHeader = func() (string, error) {
 		token, err := readPasswordFromFile(filePath)
 		if err != nil {
-			return "", fmt.Errorf("cannot read credentials from `credentials_file`=%q: %s", az.CredentialsFile, err)
+			return "", fmt.Errorf("cannot read credentials from `credentials_file`=%q: %w", az.CredentialsFile, err)
 		}
 		return azType + " " + token, nil
 	}
@@ -628,7 +628,7 @@ func (actx *authContext) initFromBasicAuthConfig(baseDir string, ba *BasicAuthCo
 	actx.getAuthHeader = func() (string, error) {
 		password, err := readPasswordFromFile(filePath)
 		if err != nil {
-			return "", fmt.Errorf("cannot read password from `password_file`=%q set in `basic_auth` section: %s", ba.PasswordFile, err)
+			return "", fmt.Errorf("cannot read password from `password_file`=%q set in `basic_auth` section: %w", ba.PasswordFile, err)
 		}
 		// See https://en.wikipedia.org/wiki/Basic_access_authentication
 		token := ba.Username + ":" + password
@@ -644,7 +644,7 @@ func (actx *authContext) initFromBearerTokenFile(baseDir string, bearerTokenFile
 	actx.getAuthHeader = func() (string, error) {
 		token, err := readPasswordFromFile(filePath)
 		if err != nil {
-			return "", fmt.Errorf("cannot read bearer token from `bearer_token_file`=%q: %s", bearerTokenFile, err)
+			return "", fmt.Errorf("cannot read bearer token from `bearer_token_file`=%q: %w", bearerTokenFile, err)
 		}
 		return "Bearer " + token, nil
 	}
@@ -672,11 +672,11 @@ func (actx *authContext) initFromOAuth2Config(baseDir string, o *OAuth2Config) e
 		}
 		ts, err := oi.getTokenSource()
 		if err != nil {
-			return "", fmt.Errorf("cannot get OAuth2 tokenSource: %s", err)
+			return "", fmt.Errorf("cannot get OAuth2 tokenSource: %w", err)
 		}
 		t, err := ts.Token()
 		if err != nil {
-			return "", fmt.Errorf("cannot get OAuth2 token: %s", err)
+			return "", fmt.Errorf("cannot get OAuth2 token: %w", err)
 		}
 		return t.Type() + " " + t.AccessToken, nil
 	}


@@ -204,7 +204,7 @@ var (
 	defaultPromRegex = func() *regexutil.PromRegex {
 		pr, err := regexutil.NewPromRegex(".*")
 		if err != nil {
-			panic(fmt.Errorf("BUG: unexpected error: %s", err))
+			panic(fmt.Errorf("BUG: unexpected error: %w", err))
 		}
 		return pr
 	}()


@@ -28,13 +28,13 @@ func writeRelabelDebug(w io.Writer, isTargetRelabel bool, targetID, metric, rela
 	}
 	labels, err := promutils.NewLabelsFromString(metric)
 	if err != nil {
-		err = fmt.Errorf("cannot parse metric: %s", err)
+		err = fmt.Errorf("cannot parse metric: %w", err)
 		WriteRelabelDebugSteps(w, targetURL, targetID, format, nil, metric, relabelConfigs, err)
 		return
 	}
 	pcs, err := ParseRelabelConfigsData([]byte(relabelConfigs))
 	if err != nil {
-		err = fmt.Errorf("cannot parse relabel configs: %s", err)
+		err = fmt.Errorf("cannot parse relabel configs: %w", err)
 		WriteRelabelDebugSteps(w, targetURL, targetID, format, nil, metric, relabelConfigs, err)
 		return
 	}


@@ -927,7 +927,7 @@ func newTestRegexRelabelConfig(pattern string) *parsedRelabelConfig {
 	}
 	prc, err := parseRelabelConfig(rc)
 	if err != nil {
-		panic(fmt.Errorf("unexpected error in parseRelabelConfig: %s", err))
+		panic(fmt.Errorf("unexpected error in parseRelabelConfig: %w", err))
 	}
 	return prc
 }


@@ -119,7 +119,7 @@ func newClient(ctx context.Context, sw *ScrapeWork) (*client, error) {
 	dialAddr = addMissingPort(dialAddr, isTLS)
 	dialFunc, err := newStatDialFunc(proxyURL, sw.ProxyAuthConfig)
 	if err != nil {
-		return nil, fmt.Errorf("cannot create dial func: %s", err)
+		return nil, fmt.Errorf("cannot create dial func: %w", err)
 	}
 	hc := &fasthttp.HostClient{
 		Addr: dialAddr,
@@ -199,12 +199,12 @@ func (c *client) GetStreamReader() (*streamReader, error) {
 	err = c.setHeaders(req)
 	if err != nil {
 		cancel()
-		return nil, fmt.Errorf("failed to create request to %q: %s", c.scrapeURL, err)
+		return nil, fmt.Errorf("failed to create request to %q: %w", c.scrapeURL, err)
 	}
 	err = c.setProxyHeaders(req)
 	if err != nil {
 		cancel()
-		return nil, fmt.Errorf("failed to create request to %q: %s", c.scrapeURL, err)
+		return nil, fmt.Errorf("failed to create request to %q: %w", c.scrapeURL, err)
 	}
 	scrapeRequests.Inc()
 	resp, err := c.sc.Do(req)
@@ -254,11 +254,11 @@ func (c *client) ReadData(dst []byte) ([]byte, error) {
 	req.Header.Set("X-Prometheus-Scrape-Timeout-Seconds", c.scrapeTimeoutSecondsStr)
 	err := c.setFasthttpHeaders(req)
 	if err != nil {
-		return nil, fmt.Errorf("failed to create request to %q: %s", c.scrapeURL, err)
+		return nil, fmt.Errorf("failed to create request to %q: %w", c.scrapeURL, err)
 	}
 	err = c.setFasthttpProxyHeaders(req)
 	if err != nil {
-		return nil, fmt.Errorf("failed to create request to %q: %s", c.scrapeURL, err)
+		return nil, fmt.Errorf("failed to create request to %q: %w", c.scrapeURL, err)
 	}
 	if !*disableCompression && !c.disableCompression {
 		req.Header.Set("Accept-Encoding", "gzip")


@@ -110,7 +110,7 @@ func (r *listDropletResponse) nextURLPath() (string, error) {
 	}
 	u, err := url.Parse(r.Links.Pages.Next)
 	if err != nil {
-		return "", fmt.Errorf("cannot parse digital ocean next url: %s, err: %s", r.Links.Pages.Next, err)
+		return "", fmt.Errorf("cannot parse digital ocean next url: %s: %w", r.Links.Pages.Next, err)
 	}
 	return u.RequestURI(), nil
 }


@@ -85,7 +85,7 @@ func getHTTPTargets(cfg *apiConfig) ([]httpGroupTarget, error) {
 func parseAPIResponse(data []byte, path string) ([]httpGroupTarget, error) {
 	var r []httpGroupTarget
 	if err := json.Unmarshal(data, &r); err != nil {
-		return nil, fmt.Errorf("cannot parse http_sd api response path: %s, err: %w", path, err)
+		return nil, fmt.Errorf("cannot parse http_sd api response path=%q: %w", path, err)
 	}
 	return r, nil
 }


@@ -10,7 +10,7 @@ func BenchmarkPodGetTargetLabels(b *testing.B) {
 	r := bytes.NewBufferString(testPodsList)
 	objectsByKey, _, err := parsePodList(r)
 	if err != nil {
-		panic(fmt.Errorf("BUG: unexpected error: %s", err))
+		panic(fmt.Errorf("BUG: unexpected error: %w", err))
 	}
 	var o object
 	for _, srcObject := range objectsByKey {


@@ -579,7 +579,7 @@ func (sw *scrapeWork) scrapeStream(scrapeTimestamp, realTimestamp int64) error {
 	samplesDropped := 0
 	sr, err := sw.GetStreamReader()
 	if err != nil {
-		err = fmt.Errorf("cannot read data: %s", err)
+		err = fmt.Errorf("cannot read data: %w", err)
 	} else {
 		var mu sync.Mutex
 		err = sbr.Init(sr)
@@ -827,7 +827,7 @@ func (sw *scrapeWork) sendStaleSeries(lastScrape, currScrape string, timestamp i
 			return nil
 		}, sw.logError)
 		if err != nil {
-			sw.logError(fmt.Errorf("cannot send stale markers: %s", err).Error())
+			sw.logError(fmt.Errorf("cannot send stale markers: %w", err).Error())
 		}
 	}
 	if addAutoSeries {


@@ -64,7 +64,7 @@ func Parse(r io.Reader, contentEncoding string, callback func(series []datadog.S
 	defer putRequest(req)
 	if err := req.Unmarshal(ctx.reqBuf.B); err != nil {
 		unmarshalErrors.Inc()
-		return fmt.Errorf("cannot unmarshal DataDog POST request with size %d bytes: %s", len(ctx.reqBuf.B), err)
+		return fmt.Errorf("cannot unmarshal DataDog POST request with size %d bytes: %w", len(ctx.reqBuf.B), err)
 	}
 	rows := 0
 	series := req.Series


@@ -58,7 +58,7 @@ func BenchmarkRowsUnmarshal(b *testing.B) {
 		var r Rows
 		for pb.Next() {
 			if err := r.Unmarshal(reqBody); err != nil {
-				panic(fmt.Errorf("unmarshal error: %s", err))
+				panic(fmt.Errorf("unmarshal error: %w", err))
 			}
 			if len(r.Rows) != 1 {
 				panic(fmt.Errorf("unexpected number of items unmarshaled; got %d; want %d", len(r.Rows), 1))


@@ -132,10 +132,10 @@ func checkParseStream(data []byte, checkSeries func(tss []prompbmarshal.TimeSeri
 	var bb bytes.Buffer
 	zw := gzip.NewWriter(&bb)
 	if _, err := zw.Write(data); err != nil {
-		return fmt.Errorf("cannot compress data: %s", err)
+		return fmt.Errorf("cannot compress data: %w", err)
 	}
 	if err := zw.Close(); err != nil {
-		return fmt.Errorf("cannot close gzip writer: %s", err)
+		return fmt.Errorf("cannot close gzip writer: %w", err)
 	}
 	if err := ParseStream(&bb, true, checkSeries); err != nil {
 		return fmt.Errorf("error when parsing compressed data: %w", err)


@@ -173,7 +173,7 @@ func (u *URL) NewDialFunc(ac *promauth.Config) (fasthttp.DialFunc, error) {
 	}
 	authHeader, err := u.getAuthHeader(ac)
 	if err != nil {
-		return nil, fmt.Errorf("cannot get auth header: %s", err)
+		return nil, fmt.Errorf("cannot get auth header: %w", err)
 	}
 	if authHeader != "" {
 		authHeader = "Proxy-Authorization: " + authHeader + "\r\n"


@@ -141,7 +141,7 @@ func (t *Tracer) AddJSON(jsonTrace []byte) error {
 	}
 	var s *span
 	if err := json.Unmarshal(jsonTrace, &s); err != nil {
-		return fmt.Errorf("cannot unmarshal json trace: %s", err)
+		return fmt.Errorf("cannot unmarshal json trace: %w", err)
 	}
 	child := &Tracer{
 		span: s,


@@ -87,7 +87,7 @@ func BenchmarkPromRegexMatchString(b *testing.B) {
 func benchmarkPromRegexMatchString(b *testing.B, expr, s string, resultExpected bool) {
 	pr, err := NewPromRegex(expr)
 	if err != nil {
-		panic(fmt.Errorf("unexpected error: %s", err))
+		panic(fmt.Errorf("unexpected error: %w", err))
 	}
 	re := regexp.MustCompile("^(?:" + expr + ")$")
 	f := func(b *testing.B, matchString func(s string) bool) {


@@ -112,7 +112,7 @@ func getTimestampsForPrecisionBits(timestamps []int64, precisionBits uint8) []in
 	data, marshalType, firstTimestamp := encoding.MarshalTimestamps(nil, timestamps, precisionBits)
 	timestampsAdjusted, err := encoding.UnmarshalTimestamps(nil, data, marshalType, firstTimestamp, len(timestamps))
 	if err != nil {
-		panic(fmt.Errorf("BUG: cannot unmarshal timestamps with precisionBits %d: %s", precisionBits, err))
+		panic(fmt.Errorf("BUG: cannot unmarshal timestamps with precisionBits %d: %w", precisionBits, err))
 	}
 	minTimestamp := timestamps[0]
 	maxTimestamp := timestamps[len(timestamps)-1]
@@ -124,7 +124,7 @@ func getValuesForPrecisionBits(values []int64, precisionBits uint8) []int64 {
 	data, marshalType, firstValue := encoding.MarshalValues(nil, values, precisionBits)
 	valuesAdjusted, err := encoding.UnmarshalValues(nil, data, marshalType, firstValue, len(values))
 	if err != nil {
-		panic(fmt.Errorf("BUG: cannot unmarshal values with precisionBits %d: %s", precisionBits, err))
+		panic(fmt.Errorf("BUG: cannot unmarshal values with precisionBits %d: %w", precisionBits, err))
 	}
 	return valuesAdjusted
 }


@@ -747,7 +747,7 @@ func (is *indexSearch) getLabelNamesForMetricIDs(qt *querytracer.Tracer, metricI
 			continue
 		}
 		if err := mn.Unmarshal(buf); err != nil {
-			logger.Panicf("FATAL: cannot unmarshal metricName %q: %w", buf, err)
+			logger.Panicf("FATAL: cannot unmarshal metricName %q: %s", buf, err)
 		}
 		for _, tag := range mn.Tags {
 			if _, ok := lns[string(tag.Key)]; !ok {
@@ -1610,7 +1610,7 @@ func (db *indexDB) searchMetricIDs(qt *querytracer.Tracer, tfss []*TagFilters, t
 	localMetricIDs, err := is.searchMetricIDs(qtChild, tfss, tr, maxMetrics)
 	db.putIndexSearch(is)
 	if err != nil {
-		return nil, fmt.Errorf("error when searching for metricIDs in the current indexdb: %s", err)
+		return nil, fmt.Errorf("error when searching for metricIDs in the current indexdb: %w", err)
 	}
 	qtChild.Done()
@@ -1635,7 +1635,7 @@ func (db *indexDB) searchMetricIDs(qt *querytracer.Tracer, tfss []*TagFilters, t
 		extDB.putMetricIDsToTagFiltersCache(qtChild, extMetricIDs, tfKeyExtBuf.B)
 	})
 	if err != nil {
-		return nil, fmt.Errorf("error when searching for metricIDs in the previous indexdb: %s", err)
+		return nil, fmt.Errorf("error when searching for metricIDs in the previous indexdb: %w", err)
 	}
 	// Merge localMetricIDs with extMetricIDs.
@@ -1834,7 +1834,7 @@ func (is *indexSearch) searchMetricName(dst []byte, metricID uint64) ([]byte, bo
 		if err == io.EOF {
 			return dst, false
 		}
-		logger.Panicf("FATAL: error when searching metricName by metricID; searchPrefix %q: %w", kb.B, err)
+		logger.Panicf("FATAL: error when searching metricName by metricID; searchPrefix %q: %s", kb.B, err)
	}
 	v := ts.Item[len(kb.B):]
 	dst = append(dst, v...)
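Note: the two logger.Panicf format strings in this file move in the opposite direction (%w is replaced with %s). This is consistent with the commit's goal: the %w verb is special-cased by fmt.Errorf only; other formatting functions treat it as a bad verb, so wrapping is impossible there anyway and %s at least prints the error text. A minimal sketch of that behavior using plain fmt calls (the assumption being that logger.Panicf formats via fmt's verb handling):

package main

import (
	"errors"
	"fmt"
)

func main() {
	err := errors.New("boom")

	// fmt.Errorf understands %w and records err for errors.Is / errors.Unwrap.
	wrapped := fmt.Errorf("context: %w", err)
	fmt.Println(errors.Is(wrapped, err)) // true

	// Other fmt functions treat %w as a bad verb and emit noise instead of the text.
	fmt.Println(fmt.Sprintf("context: %w", err)) // context: %!w(*errors.errorString=&{boom})
}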


@@ -50,7 +50,7 @@ func benchmarkPartSearch(b *testing.B, p *part, tsids []TSID, tr TimeRange, spar
 			blocksRead++
 		}
 		if err := ps.Error(); err != nil {
-			panic(fmt.Errorf("BUG: unexpected error: %s", err))
+			panic(fmt.Errorf("BUG: unexpected error: %w", err))
 		}
 		blocksWant := len(tsids) / sparseness
 		if blocksRead != blocksWant {