Merge tag 'v1.105.0' into pmm-6401-read-prometheus-data-files-cpc

v1.105.0
f41gh7 2024-10-21 19:01:12 +02:00
commit cf7eb6bc7c
313 changed files with 10094 additions and 3093 deletions


@@ -265,6 +265,14 @@ release-victoria-metrics-windows-goarch: victoria-metrics-windows-$(GOARCH)-prod
cd bin && rm -rf \ cd bin && rm -rf \
victoria-metrics-windows-$(GOARCH)-prod.exe victoria-metrics-windows-$(GOARCH)-prod.exe
release-victoria-logs-bundle: \
release-victoria-logs \
release-vlogscli
publish-victoria-logs-bundle: \
publish-victoria-logs \
publish-vlogscli
release-victoria-logs: release-victoria-logs:
$(MAKE_PARALLEL) release-victoria-logs-linux-386 \ $(MAKE_PARALLEL) release-victoria-logs-linux-386 \
release-victoria-logs-linux-amd64 \ release-victoria-logs-linux-amd64 \


@@ -22,7 +22,7 @@ Here are some resources and information about VictoriaMetrics:
- Case studies: [Grammarly, Roblox, Wix,...](https://docs.victoriametrics.com/casestudies/). - Case studies: [Grammarly, Roblox, Wix,...](https://docs.victoriametrics.com/casestudies/).
- Available: [Binary releases](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/latest), [Docker images](https://hub.docker.com/r/victoriametrics/victoria-metrics/), [Source code](https://github.com/VictoriaMetrics/VictoriaMetrics) - Available: [Binary releases](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/latest), [Docker images](https://hub.docker.com/r/victoriametrics/victoria-metrics/), [Source code](https://github.com/VictoriaMetrics/VictoriaMetrics)
- Deployment types: [Single-node version](https://docs.victoriametrics.com/), [Cluster version](https://docs.victoriametrics.com/cluster-victoriametrics/), and [Enterprise version](https://docs.victoriametrics.com/enterprise/) - Deployment types: [Single-node version](https://docs.victoriametrics.com/), [Cluster version](https://docs.victoriametrics.com/cluster-victoriametrics/), and [Enterprise version](https://docs.victoriametrics.com/enterprise/)
- Changelog: [CHANGELOG](https://docs.victoriametrics.com/changelog/), and [How to upgrade](#how-to-upgrade-victoriametrics) - Changelog: [CHANGELOG](https://docs.victoriametrics.com/changelog/), and [How to upgrade](https://docs.victoriametrics.com/#how-to-upgrade-victoriametrics)
- Community: [Slack](https://slack.victoriametrics.com/), [Twitter](https://twitter.com/VictoriaMetrics), [LinkedIn](https://www.linkedin.com/company/victoriametrics/), [YouTube](https://www.youtube.com/@VictoriaMetrics) - Community: [Slack](https://slack.victoriametrics.com/), [Twitter](https://twitter.com/VictoriaMetrics), [LinkedIn](https://www.linkedin.com/company/victoriametrics/), [YouTube](https://www.youtube.com/@VictoriaMetrics)
Yes, we open-source both the single-node VictoriaMetrics and the cluster version. Yes, we open-source both the single-node VictoriaMetrics and the cluster version.
@@ -38,17 +38,17 @@ VictoriaMetrics is optimized for timeseries data, even when old time series are
* **Easy to set up**: No dependencies, single [small binary](https://medium.com/@valyala/stripping-dependency-bloat-in-victoriametrics-docker-image-983fb5912b0d), configuration through command-line flags, and the defaults are also fine-tuned; backup and restore with [instant snapshots](https://medium.com/@valyala/how-victoriametrics-makes-instant-snapshots-for-multi-terabyte-time-series-data-e1f3fb0e0282). * **Easy to set up**: No dependencies, single [small binary](https://medium.com/@valyala/stripping-dependency-bloat-in-victoriametrics-docker-image-983fb5912b0d), configuration through command-line flags, and the defaults are also fine-tuned; backup and restore with [instant snapshots](https://medium.com/@valyala/how-victoriametrics-makes-instant-snapshots-for-multi-terabyte-time-series-data-e1f3fb0e0282).
* **Global query view**: Multiple Prometheus instances or any other data sources may ingest data into VictoriaMetrics and be queried via a single query. * **Global query view**: Multiple Prometheus instances or any other data sources may ingest data into VictoriaMetrics and be queried via a single query.
* **Various Protocols**: Supports metric scraping, ingestion and backfilling in various protocols. * **Various Protocols**: Supports metric scraping, ingestion and backfilling in various protocols.
* [Prometheus exporters](#how-to-scrape-prometheus-exporters-such-as-node-exporter), [Prometheus remote write API](#prometheus-setup), [Prometheus exposition format](#how-to-import-data-in-prometheus-exposition-format). * [Prometheus exporters](https://docs.victoriametrics.com/#how-to-scrape-prometheus-exporters-such-as-node-exporter), [Prometheus remote write API](https://docs.victoriametrics.com/#prometheus-setup), [Prometheus exposition format](https://docs.victoriametrics.com/#how-to-import-data-in-prometheus-exposition-format).
* [InfluxDB line protocol](#how-to-send-data-from-influxdb-compatible-agents-such-as-telegraf) over HTTP, TCP and UDP. * [InfluxDB line protocol](https://docs.victoriametrics.com/#how-to-send-data-from-influxdb-compatible-agents-such-as-telegraf) over HTTP, TCP and UDP.
* [Graphite plaintext protocol](#how-to-send-data-from-graphite-compatible-agents-such-as-statsd) with [tags](https://graphite.readthedocs.io/en/latest/tags.html#carbon). * [Graphite plaintext protocol](https://docs.victoriametrics.com/#how-to-send-data-from-graphite-compatible-agents-such-as-statsd) with [tags](https://graphite.readthedocs.io/en/latest/tags.html#carbon).
* [OpenTSDB put message](#sending-data-via-telnet-put-protocol). * [OpenTSDB put message](https://docs.victoriametrics.com/#sending-data-via-telnet-put-protocol).
* [HTTP OpenTSDB /api/put requests](#sending-opentsdb-data-via-http-apiput-requests). * [HTTP OpenTSDB /api/put requests](https://docs.victoriametrics.com/#sending-opentsdb-data-via-http-apiput-requests).
* [JSON line format](#how-to-import-data-in-json-line-format). * [JSON line format](https://docs.victoriametrics.com/#how-to-import-data-in-json-line-format).
* [Arbitrary CSV data](#how-to-import-csv-data). * [Arbitrary CSV data](https://docs.victoriametrics.com/#how-to-import-csv-data).
* [Native binary format](#how-to-import-data-in-native-format). * [Native binary format](https://docs.victoriametrics.com/#how-to-import-data-in-native-format).
* [DataDog agent or DogStatsD](#how-to-send-data-from-datadog-agent). * [DataDog agent or DogStatsD](https://docs.victoriametrics.com/#how-to-send-data-from-datadog-agent).
* [NewRelic infrastructure agent](#how-to-send-data-from-newrelic-agent). * [NewRelic infrastructure agent](https://docs.victoriametrics.com/#how-to-send-data-from-newrelic-agent).
* [OpenTelemetry metrics format](#sending-data-via-opentelemetry). * [OpenTelemetry metrics format](https://docs.victoriametrics.com/#sending-data-via-opentelemetry).
* **NFS-based storages**: Supports storing data on NFS-based storages such as Amazon EFS, Google Filestore. * **NFS-based storages**: Supports storing data on NFS-based storages such as Amazon EFS, Google Filestore.
* And many other features such as metrics relabeling, cardinality limiter, etc. * And many other features such as metrics relabeling, cardinality limiter, etc.


@@ -92,6 +92,9 @@ func requestHandler(w http.ResponseWriter, r *http.Request) bool {
if vlselect.RequestHandler(w, r) { if vlselect.RequestHandler(w, r) {
return true return true
} }
if vlstorage.RequestHandler(w, r) {
return true
}
return false return false
} }
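The new vlstorage check slots into the existing first-match-wins dispatch: each subsystem handler reports whether it consumed the request, and the first one that does stops the chain. A minimal standalone sketch of that pattern (handler names and the /internal/force_merge route are illustrative, not the exact victoria-logs wiring):

package main

import "net/http"

// subHandler reports whether it handled the request.
type subHandler func(w http.ResponseWriter, r *http.Request) bool

// chain tries each sub-handler in order; the first one that claims
// the request wins, mirroring requestHandler above.
func chain(handlers ...subHandler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		for _, h := range handlers {
			if h(w, r) {
				return
			}
		}
		http.NotFound(w, r)
	}
}

func main() {
	h := chain(
		func(w http.ResponseWriter, r *http.Request) bool { return false }, // stands in for vlselect
		func(w http.ResponseWriter, r *http.Request) bool { // stands in for vlstorage
			if r.URL.Path != "/internal/force_merge" {
				return false
			}
			w.WriteHeader(http.StatusOK)
			return true
		},
	)
	_ = http.ListenAndServe(":8080", h)
}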


@@ -86,10 +86,10 @@ func TestReadBulkRequest_Success(t *testing.T) {
msgField := "message" msgField := "message"
rowsExpected := 4 rowsExpected := 4
timestampsExpected := []int64{1686026891735000000, 1686023292735000000, 1686026893735000000, 1686026893000000000} timestampsExpected := []int64{1686026891735000000, 1686023292735000000, 1686026893735000000, 1686026893000000000}
resultExpected := `{"@timestamp":"","log.offset":"71770","log.file.path":"/var/log/auth.log","_msg":"foobar"} resultExpected := `{"log.offset":"71770","log.file.path":"/var/log/auth.log","_msg":"foobar"}
{"@timestamp":"","_msg":"baz"} {"_msg":"baz"}
{"_msg":"xyz","@timestamp":"","x":"y"} {"_msg":"xyz","x":"y"}
{"_msg":"qwe rty","@timestamp":""}` {"_msg":"qwe rty"}`
f(data, timeField, msgField, rowsExpected, timestampsExpected, resultExpected) f(data, timeField, msgField, rowsExpected, timestampsExpected, resultExpected)
} }


@@ -30,9 +30,9 @@ func TestProcessStreamInternal_Success(t *testing.T) {
msgField := "message" msgField := "message"
rowsExpected := 3 rowsExpected := 3
timestampsExpected := []int64{1686026891735000000, 1686023292735000000, 1686026893735000000} timestampsExpected := []int64{1686026891735000000, 1686023292735000000, 1686026893735000000}
resultExpected := `{"@timestamp":"","log.offset":"71770","log.file.path":"/var/log/auth.log","_msg":"foobar"} resultExpected := `{"log.offset":"71770","log.file.path":"/var/log/auth.log","_msg":"foobar"}
{"@timestamp":"","_msg":"baz"} {"_msg":"baz"}
{"_msg":"xyz","@timestamp":"","x":"y"}` {"_msg":"xyz","x":"y"}`
f(data, timeField, msgField, rowsExpected, timestampsExpected, resultExpected) f(data, timeField, msgField, rowsExpected, timestampsExpected, resultExpected)
} }


@@ -101,9 +101,9 @@ func TestProcessStreamInternal_Success(t *testing.T) {
currentYear := 2023 currentYear := 2023
rowsExpected := 3 rowsExpected := 3
timestampsExpected := []int64{1685794113000000000, 1685880513000000000, 1685814132345000000} timestampsExpected := []int64{1685794113000000000, 1685880513000000000, 1685814132345000000}
resultExpected := `{"format":"rfc3164","timestamp":"","hostname":"abcd","app_name":"systemd","_msg":"Starting Update the local ESM caches..."} resultExpected := `{"format":"rfc3164","hostname":"abcd","app_name":"systemd","_msg":"Starting Update the local ESM caches..."}
{"priority":"165","facility":"20","severity":"5","format":"rfc3164","timestamp":"","hostname":"abcd","app_name":"systemd","proc_id":"345","_msg":"abc defg"} {"priority":"165","facility":"20","severity":"5","format":"rfc3164","hostname":"abcd","app_name":"systemd","proc_id":"345","_msg":"abc defg"}
{"priority":"123","facility":"15","severity":"3","format":"rfc5424","timestamp":"","hostname":"mymachine.example.com","app_name":"appname","proc_id":"12345","msg_id":"ID47","exampleSDID@32473.iut":"3","exampleSDID@32473.eventSource":"Application 123 = ] 56","exampleSDID@32473.eventID":"11211","_msg":"This is a test message with structured data."}` {"priority":"123","facility":"15","severity":"3","format":"rfc5424","hostname":"mymachine.example.com","app_name":"appname","proc_id":"12345","msg_id":"ID47","exampleSDID@32473.iut":"3","exampleSDID@32473.eventSource":"Application 123 = ] 56","exampleSDID@32473.eventID":"11211","_msg":"This is a test message with structured data."}`
f(data, currentYear, rowsExpected, timestampsExpected, resultExpected) f(data, currentYear, rowsExpected, timestampsExpected, resultExpected)
} }


@@ -1,32 +1,73 @@
package main package main
import ( import (
"bufio"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"sort"
"sync" "sync"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/logstorage"
) )
type outputMode int
const (
outputModeJSONMultiline = outputMode(0)
outputModeJSONSingleline = outputMode(1)
outputModeLogfmt = outputMode(2)
outputModeCompact = outputMode(3)
)
func getOutputFormatter(outputMode outputMode) func(w io.Writer, fields []logstorage.Field) error {
switch outputMode {
case outputModeJSONMultiline:
return func(w io.Writer, fields []logstorage.Field) error {
return writeJSONObject(w, fields, true)
}
case outputModeJSONSingleline:
return func(w io.Writer, fields []logstorage.Field) error {
return writeJSONObject(w, fields, false)
}
case outputModeLogfmt:
return writeLogfmtObject
case outputModeCompact:
return writeCompactObject
default:
panic(fmt.Errorf("BUG: unexpected outputMode=%d", outputMode))
}
}
type jsonPrettifier struct { type jsonPrettifier struct {
rOriginal io.ReadCloser r io.ReadCloser
formatter func(w io.Writer, fields []logstorage.Field) error
d *json.Decoder d *json.Decoder
pr *io.PipeReader pr *io.PipeReader
pw *io.PipeWriter pw *io.PipeWriter
bw *bufio.Writer
wg sync.WaitGroup wg sync.WaitGroup
} }
func newJSONPrettifier(r io.ReadCloser) *jsonPrettifier { func newJSONPrettifier(r io.ReadCloser, outputMode outputMode) *jsonPrettifier {
d := json.NewDecoder(r) d := json.NewDecoder(r)
pr, pw := io.Pipe() pr, pw := io.Pipe()
bw := bufio.NewWriter(pw)
formatter := getOutputFormatter(outputMode)
jp := &jsonPrettifier{ jp := &jsonPrettifier{
rOriginal: r, r: r,
d: d, formatter: formatter,
pr: pr,
pw: pw, d: d,
pr: pr,
pw: pw,
bw: bw,
} }
jp.wg.Add(1) jp.wg.Add(1)
@@ -46,15 +87,19 @@ func (jp *jsonPrettifier) closePipesWithError(err error) {
func (jp *jsonPrettifier) prettifyJSONLines() error { func (jp *jsonPrettifier) prettifyJSONLines() error {
for jp.d.More() { for jp.d.More() {
var v any fields, err := readNextJSONObject(jp.d)
if err := jp.d.Decode(&v); err != nil { if err != nil {
return err return err
} }
line, err := json.MarshalIndent(v, "", " ") sort.Slice(fields, func(i, j int) bool {
if err != nil { return fields[i].Name < fields[j].Name
panic(fmt.Errorf("BUG: cannot marshal %v to JSON: %w", v, err)) })
if err := jp.formatter(jp.bw, fields); err != nil {
return err
} }
if _, err := fmt.Fprintf(jp.pw, "%s\n", line); err != nil {
// Flush bw after every output line in order to show results as soon as they appear.
if err := jp.bw.Flush(); err != nil {
return err return err
} }
} }
@@ -63,7 +108,7 @@ func (jp *jsonPrettifier) prettifyJSONLines() error {
func (jp *jsonPrettifier) Close() error { func (jp *jsonPrettifier) Close() error {
jp.closePipesWithError(io.ErrUnexpectedEOF) jp.closePipesWithError(io.ErrUnexpectedEOF)
err := jp.rOriginal.Close() err := jp.r.Close()
jp.wg.Wait() jp.wg.Wait()
return err return err
} }
@@ -71,3 +116,123 @@ func (jp *jsonPrettifier) Close() error {
func (jp *jsonPrettifier) Read(p []byte) (int, error) { func (jp *jsonPrettifier) Read(p []byte) (int, error) {
return jp.pr.Read(p) return jp.pr.Read(p)
} }
func readNextJSONObject(d *json.Decoder) ([]logstorage.Field, error) {
t, err := d.Token()
if err != nil {
return nil, fmt.Errorf("cannot read '{': %w", err)
}
delim, ok := t.(json.Delim)
if !ok || delim.String() != "{" {
return nil, fmt.Errorf("unexpected token read; got %q; want '{'", delim)
}
var fields []logstorage.Field
for {
// Read object key
t, err := d.Token()
if err != nil {
return nil, fmt.Errorf("cannot read JSON object key or closing brace: %w", err)
}
delim, ok := t.(json.Delim)
if ok {
if delim.String() == "}" {
return fields, nil
}
return nil, fmt.Errorf("unexpected delimiter read; got %q; want '}'", delim)
}
key, ok := t.(string)
if !ok {
return nil, fmt.Errorf("unexpected token read for object key: %v; want string or '}'", t)
}
// read object value
t, err = d.Token()
if err != nil {
return nil, fmt.Errorf("cannot read JSON object value: %w", err)
}
value, ok := t.(string)
if !ok {
return nil, fmt.Errorf("unexpected token read for object value: %v; want string", t)
}
fields = append(fields, logstorage.Field{
Name: key,
Value: value,
})
}
}
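Note that readNextJSONObject only accepts flat JSON objects whose values are strings, which matches the shape of /select/logsql/query responses. A hedged same-package usage sketch (assumes the unexported function above):

d := json.NewDecoder(strings.NewReader(`{"_msg":"hello","level":"info"}`))
fields, err := readNextJSONObject(d)
// fields: [{_msg hello} {level info}], err: nil.
// A numeric or nested value, e.g. {"count":42}, would instead return an
// "unexpected token read for object value" error.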
func writeLogfmtObject(w io.Writer, fields []logstorage.Field) error {
data := logstorage.MarshalFieldsToLogfmt(nil, fields)
_, err := fmt.Fprintf(w, "%s\n", data)
return err
}
func writeCompactObject(w io.Writer, fields []logstorage.Field) error {
if len(fields) == 1 {
// Just write field value as is without name
_, err := fmt.Fprintf(w, "%s\n", fields[0].Value)
return err
}
if len(fields) == 2 && (fields[0].Name == "_time" || fields[1].Name == "_time") {
// Write _time\tfieldValue as is
if fields[0].Name == "_time" {
_, err := fmt.Fprintf(w, "%s\t%s\n", fields[0].Value, fields[1].Value)
return err
}
_, err := fmt.Fprintf(w, "%s\t%s\n", fields[1].Value, fields[0].Value)
return err
}
// Fall back to logfmt
return writeLogfmtObject(w, fields)
}
func writeJSONObject(w io.Writer, fields []logstorage.Field, isMultiline bool) error {
if len(fields) == 0 {
fmt.Fprintf(w, "{}\n")
return nil
}
fmt.Fprintf(w, "{")
writeNewlineIfNeeded(w, isMultiline)
if err := writeJSONObjectKeyValue(w, fields[0], isMultiline); err != nil {
return err
}
for _, f := range fields[1:] {
fmt.Fprintf(w, ",")
writeNewlineIfNeeded(w, isMultiline)
if err := writeJSONObjectKeyValue(w, f, isMultiline); err != nil {
return err
}
}
writeNewlineIfNeeded(w, isMultiline)
fmt.Fprintf(w, "}\n")
return nil
}
func writeNewlineIfNeeded(w io.Writer, isMultiline bool) {
if isMultiline {
fmt.Fprintf(w, "\n")
}
}
func writeJSONObjectKeyValue(w io.Writer, f logstorage.Field, isMultiline bool) error {
key := getJSONString(f.Name)
value := getJSONString(f.Value)
if isMultiline {
_, err := fmt.Fprintf(w, " %s: %s", key, value)
return err
}
_, err := fmt.Fprintf(w, "%s:%s", key, value)
return err
}
func getJSONString(s string) string {
data, err := json.Marshal(s)
if err != nil {
panic(fmt.Errorf("unexpected error when marshaling string to JSON: %w", err))
}
return string(data)
}
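A minimal usage sketch of the new prettifier (same package, unexported API as shown above; the exact output depends on the selected mode):

body := io.NopCloser(strings.NewReader(`{"level":"info","_msg":"hello"}`))
jp := newJSONPrettifier(body, outputModeLogfmt)
defer func() { _ = jp.Close() }()
// Fields are sorted by name before formatting, so logfmt mode prints
// something like: _msg=hello level=info
_, _ = io.Copy(os.Stdout, jp)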


@ -27,9 +27,13 @@ import (
var ( var (
datasourceURL = flag.String("datasource.url", "http://localhost:9428/select/logsql/query", "URL for querying VictoriaLogs; "+ datasourceURL = flag.String("datasource.url", "http://localhost:9428/select/logsql/query", "URL for querying VictoriaLogs; "+
"see https://docs.victoriametrics.com/victorialogs/querying/#querying-logs") "see https://docs.victoriametrics.com/victorialogs/querying/#querying-logs . See also -tail.url")
tailURL = flag.String("tail.url", "", "URL for live tailing queries to VictoriaLogs; see https://docs.victoriametrics.com/victorialogs/querying/#live-tailing . "+
"The url is automatically detected from -datasource.url by replacing /query with /tail at the end if -tail.url is empty")
historyFile = flag.String("historyFile", "vlogscli-history", "Path to file with command history") historyFile = flag.String("historyFile", "vlogscli-history", "Path to file with command history")
header = flagutil.NewArrayString("header", "Optional header to pass in request -datasource.url in the form 'HeaderName: value'") header = flagutil.NewArrayString("header", "Optional header to pass in request -datasource.url in the form 'HeaderName: value'")
accountID = flag.Int("accountID", 0, "Account ID to query; see https://docs.victoriametrics.com/victorialogs/#multitenancy")
projectID = flag.Int("projectID", 0, "Project ID to query; see https://docs.victoriametrics.com/victorialogs/#multitenancy")
) )
const ( const (
@@ -51,12 +55,12 @@ func main() {
} }
headers = hes headers = hes
isEmptyLine := true incompleteLine := ""
cfg := &readline.Config{ cfg := &readline.Config{
Prompt: firstLinePrompt, Prompt: firstLinePrompt,
DisableAutoSaveHistory: true, DisableAutoSaveHistory: true,
Listener: func(line []rune, pos int, _ rune) ([]rune, int, bool) { Listener: func(line []rune, pos int, _ rune) ([]rune, int, bool) {
isEmptyLine = len(line) == 0 incompleteLine = string(line)
return line, pos, false return line, pos, false
}, },
} }
@@ -67,7 +71,7 @@ func main() {
fmt.Fprintf(rl, "sending queries to %s\n", *datasourceURL) fmt.Fprintf(rl, "sending queries to %s\n", *datasourceURL)
runReadlineLoop(rl, &isEmptyLine) runReadlineLoop(rl, &incompleteLine)
if err := rl.Close(); err != nil { if err := rl.Close(); err != nil {
fatalf("cannot close readline: %s", err) fatalf("cannot close readline: %s", err)
@@ -75,7 +79,7 @@ func main() {
} }
func runReadlineLoop(rl *readline.Instance, isEmptyLine *bool) { func runReadlineLoop(rl *readline.Instance, incompleteLine *string) {
historyLines, err := loadFromHistory(*historyFile) historyLines, err := loadFromHistory(*historyFile)
if err != nil { if err != nil {
fatalf("cannot load query history: %s", err) fatalf("cannot load query history: %s", err)
@@ -86,6 +90,7 @@ func runReadlineLoop(rl *readline.Instance, isEmptyLine *bool) {
} }
} }
outputMode := outputModeJSONMultiline
s := "" s := ""
for { for {
line, err := rl.ReadLine() line, err := rl.ReadLine()
@@ -94,17 +99,17 @@ func runReadlineLoop(rl *readline.Instance, isEmptyLine *bool) {
case io.EOF: case io.EOF:
if s != "" { if s != "" {
// This is non-interactive query execution. // This is non-interactive query execution.
if err := executeQuery(context.Background(), rl, s); err != nil { executeQuery(context.Background(), rl, s, outputMode)
fmt.Fprintf(rl, "%s\n", err)
}
} }
return return
case readline.ErrInterrupt: case readline.ErrInterrupt:
if s == "" && *isEmptyLine { if s == "" && *incompleteLine == "" {
fmt.Fprintf(rl, "interrupted\n") fmt.Fprintf(rl, "interrupted\n")
os.Exit(128 + int(syscall.SIGINT)) os.Exit(128 + int(syscall.SIGINT))
} }
// Default value for Ctrl+C - clear the prompt // Default value for Ctrl+C - clear the prompt and store the incompletely entered line into history
s += *incompleteLine
historyLines = pushToHistory(rl, historyLines, s)
s = "" s = ""
rl.SetPrompt(firstLinePrompt) rl.SetPrompt(firstLinePrompt)
continue continue
@@ -114,14 +119,50 @@ func runReadlineLoop(rl *readline.Instance, isEmptyLine *bool) {
} }
s += line s += line
if isQuitCommand(s) {
fmt.Fprintf(rl, "bye!\n")
return
}
if s == "" { if s == "" {
// Skip empty lines // Skip empty lines
continue continue
} }
if isQuitCommand(s) {
fmt.Fprintf(rl, "bye!\n")
_ = pushToHistory(rl, historyLines, s)
return
}
if isHelpCommand(s) {
printCommandsHelp(rl)
historyLines = pushToHistory(rl, historyLines, s)
s = ""
continue
}
if s == `\s` {
fmt.Fprintf(rl, "singleline json output mode\n")
outputMode = outputModeJSONSingleline
historyLines = pushToHistory(rl, historyLines, s)
s = ""
continue
}
if s == `\m` {
fmt.Fprintf(rl, "multiline json output mode\n")
outputMode = outputModeJSONMultiline
historyLines = pushToHistory(rl, historyLines, s)
s = ""
continue
}
if s == `\c` {
fmt.Fprintf(rl, "compact output mode\n")
outputMode = outputModeCompact
historyLines = pushToHistory(rl, historyLines, s)
s = ""
continue
}
if s == `\logfmt` {
fmt.Fprintf(rl, "logfmt output mode\n")
outputMode = outputModeLogfmt
historyLines = pushToHistory(rl, historyLines, s)
s = ""
continue
}
if line != "" && !strings.HasSuffix(line, ";") { if line != "" && !strings.HasSuffix(line, ";") {
// Assume the query is incomplete and allow the user finishing the query on the next line // Assume the query is incomplete and allow the user finishing the query on the next line
s += "\n" s += "\n"
@@ -131,36 +172,32 @@ func runReadlineLoop(rl *readline.Instance, isEmptyLine *bool) {
// Execute the query // Execute the query
ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt) ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
err = executeQuery(ctx, rl, s) executeQuery(ctx, rl, s, outputMode)
cancel() cancel()
if err != nil { historyLines = pushToHistory(rl, historyLines, s)
if errors.Is(err, context.Canceled) {
fmt.Fprintf(rl, "\n")
} else {
fmt.Fprintf(rl, "%s\n", err)
}
// Save queries in the history even if they weren't finished successfully
}
s = strings.TrimSpace(s)
if len(historyLines) == 0 || historyLines[len(historyLines)-1] != s {
historyLines = append(historyLines, s)
if len(historyLines) > 500 {
historyLines = historyLines[len(historyLines)-500:]
}
if err := saveToHistory(*historyFile, historyLines); err != nil {
fatalf("cannot save query history: %s", err)
}
}
if err := rl.SaveToHistory(s); err != nil {
fatalf("cannot update query history: %s", err)
}
s = "" s = ""
rl.SetPrompt(firstLinePrompt) rl.SetPrompt(firstLinePrompt)
} }
} }
func pushToHistory(rl *readline.Instance, historyLines []string, s string) []string {
s = strings.TrimSpace(s)
if len(historyLines) == 0 || historyLines[len(historyLines)-1] != s {
historyLines = append(historyLines, s)
if len(historyLines) > 500 {
historyLines = historyLines[len(historyLines)-500:]
}
if err := saveToHistory(*historyFile, historyLines); err != nil {
fatalf("cannot save query history: %s", err)
}
}
if err := rl.SaveToHistory(s); err != nil {
fatalf("cannot update query history: %s", err)
}
return historyLines
}
func loadFromHistory(filePath string) ([]string, error) { func loadFromHistory(filePath string) ([]string, error) {
data, err := os.ReadFile(filePath) data, err := os.ReadFile(filePath)
if err != nil { if err != nil {
@@ -198,29 +235,112 @@ func saveToHistory(filePath string, lines []string) error {
func isQuitCommand(s string) bool { func isQuitCommand(s string) bool {
switch s { switch s {
case "q", "quit", "exit": case `\q`, "q", "quit", "exit":
return true return true
default: default:
return false return false
} }
} }
func executeQuery(ctx context.Context, output io.Writer, s string) error { func isHelpCommand(s string) bool {
// Parse the query and convert it to canonical view. switch s {
s = strings.TrimSuffix(s, ";") case `\h`, "h", "help", "?":
q, err := logstorage.ParseQuery(s) return true
if err != nil { default:
return fmt.Errorf("cannot parse query: %w", err) return false
} }
qStr := q.String() }
func printCommandsHelp(w io.Writer) {
fmt.Fprintf(w, "%s", `List of available commands:
\q - quit
\h - show this help
\s - singleline json output mode
\m - multiline json output mode
\c - compact output
\logfmt - logfmt output mode
\tail <query> - live tail <query> results
`)
}
func executeQuery(ctx context.Context, output io.Writer, qStr string, outputMode outputMode) {
if strings.HasPrefix(qStr, `\tail `) {
tailQuery(ctx, output, qStr, outputMode)
return
}
respBody := getQueryResponse(ctx, output, qStr, outputMode, *datasourceURL)
if respBody == nil {
return
}
defer func() {
_ = respBody.Close()
}()
if err := readWithLess(respBody); err != nil {
fmt.Fprintf(output, "error when reading query response: %s\n", err)
return
}
}
func tailQuery(ctx context.Context, output io.Writer, qStr string, outputMode outputMode) {
qStr = strings.TrimPrefix(qStr, `\tail `)
qURL, err := getTailURL()
if err != nil {
fmt.Fprintf(output, "%s\n", err)
return
}
respBody := getQueryResponse(ctx, output, qStr, outputMode, qURL)
if respBody == nil {
return
}
defer func() {
_ = respBody.Close()
}()
if _, err := io.Copy(output, respBody); err != nil {
if !errors.Is(err, context.Canceled) && !isErrPipe(err) {
fmt.Fprintf(output, "error when live tailing query response: %s\n", err)
}
fmt.Fprintf(output, "\n")
return
}
}
func getTailURL() (string, error) {
if *tailURL != "" {
return *tailURL, nil
}
u, err := url.Parse(*datasourceURL)
if err != nil {
return "", fmt.Errorf("cannot parse -datasource.url=%q: %w", *datasourceURL, err)
}
if !strings.HasSuffix(u.Path, "/query") {
return "", fmt.Errorf("cannot find /query suffix in -datasource.url=%q", *datasourceURL)
}
u.Path = u.Path[:len(u.Path)-len("/query")] + "/tail"
return u.String(), nil
}
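A standalone illustration of the same derivation for the default -datasource.url (pure net/url, mirroring the logic above):

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	u, err := url.Parse("http://localhost:9428/select/logsql/query")
	if err != nil {
		panic(err)
	}
	// replace the trailing /query with /tail, as getTailURL does
	u.Path = strings.TrimSuffix(u.Path, "/query") + "/tail"
	fmt.Println(u.String()) // http://localhost:9428/select/logsql/tail
}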
func getQueryResponse(ctx context.Context, output io.Writer, qStr string, outputMode outputMode, qURL string) io.ReadCloser {
// Parse the query and convert it to canonical view.
qStr = strings.TrimSuffix(qStr, ";")
q, err := logstorage.ParseQuery(qStr)
if err != nil {
fmt.Fprintf(output, "cannot parse query: %s\n", err)
return nil
}
qStr = q.String()
fmt.Fprintf(output, "executing [%s]...", qStr) fmt.Fprintf(output, "executing [%s]...", qStr)
// Prepare HTTP request for VictoriaLogs // Prepare HTTP request for qURL
args := make(url.Values) args := make(url.Values)
args.Set("query", qStr) args.Set("query", qStr)
data := strings.NewReader(args.Encode()) data := strings.NewReader(args.Encode())
req, err := http.NewRequestWithContext(ctx, "POST", *datasourceURL, data) req, err := http.NewRequestWithContext(ctx, "POST", qURL, data)
if err != nil { if err != nil {
panic(fmt.Errorf("BUG: cannot prepare request to server: %w", err)) panic(fmt.Errorf("BUG: cannot prepare request to server: %w", err))
} }
@@ -228,38 +348,37 @@ func executeQuery(ctx context.Context, output io.Writer, s string) error {
for _, h := range headers { for _, h := range headers {
req.Header.Set(h.Name, h.Value) req.Header.Set(h.Name, h.Value)
} }
req.Header.Set("AccountID", strconv.Itoa(*accountID))
req.Header.Set("ProjectID", strconv.Itoa(*projectID))
// Execute HTTP request at VictoriaLogs // Execute HTTP request at qURL
startTime := time.Now() startTime := time.Now()
resp, err := httpClient.Do(req) resp, err := httpClient.Do(req)
queryDuration := time.Since(startTime) queryDuration := time.Since(startTime)
fmt.Fprintf(output, "; duration: %.3fs\n", queryDuration.Seconds()) fmt.Fprintf(output, "; duration: %.3fs\n", queryDuration.Seconds())
if err != nil { if err != nil {
return fmt.Errorf("cannot execute query: %w", err) if errors.Is(err, context.Canceled) {
fmt.Fprintf(output, "\n")
} else {
fmt.Fprintf(output, "cannot execute query: %s\n", err)
}
return nil
} }
defer func() {
_ = resp.Body.Close()
}()
// Verify response code
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
body = []byte(fmt.Sprintf("cannot read response body: %s", err)) body = []byte(fmt.Sprintf("cannot read response body: %s", err))
} }
return fmt.Errorf("unexpected status code: %d; response body:\n%s", resp.StatusCode, body) fmt.Fprintf(output, "unexpected status code: %d; response body:\n%s\n", resp.StatusCode, body)
return nil
} }
// Prettify the response and stream it to 'less'. // Prettify the response body
jp := newJSONPrettifier(resp.Body) jp := newJSONPrettifier(resp.Body, outputMode)
defer func() {
_ = jp.Close()
}()
if err := readWithLess(jp); err != nil { return jp
return fmt.Errorf("error when reading query response: %w", err)
}
return nil
} }
var httpClient = &http.Client{} var httpClient = &http.Client{}
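For reference, the request the CLI now builds is roughly the following; a hedged standalone sketch, where the query value is illustrative and tenant IDs 0/0 are the flag defaults:

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	form := url.Values{"query": {"error"}}
	req, err := http.NewRequest("POST", "http://localhost:9428/select/logsql/query", strings.NewReader(form.Encode()))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("AccountID", "0") // multitenancy headers added in this commit
	req.Header.Set("ProjectID", "0")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}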


@@ -394,7 +394,9 @@ func ProcessLiveTailRequest(ctx context.Context, w http.ResponseWriter, r *http.
return return
} }
if !q.CanLiveTail() { if !q.CanLiveTail() {
httpserver.Errorf(w, r, "the query [%s] cannot be used in live tailing; see https://docs.victoriametrics.com/victorialogs/querying/#live-tailing for details", q) httpserver.Errorf(w, r, "the query [%s] cannot be used in live tailing; "+
"see https://docs.victoriametrics.com/victorialogs/querying/#live-tailing for details", q)
return
} }
q.Optimize() q.Optimize()


@@ -6,15 +6,31 @@
// JSONRow creates JSON row from the given fields. // JSONRow creates JSON row from the given fields.
{% func JSONRow(columns []logstorage.BlockColumn, rowIdx int) %} {% func JSONRow(columns []logstorage.BlockColumn, rowIdx int) %}
{ {% code
{% code c := &columns[0] %} i := 0
for i < len(columns) && columns[i].Values[rowIdx] == "" {
i++
}
columns = columns[i:]
%}
{% if len(columns) == 0 %}
{% return %}
{% endif %}
{
{% code c := &columns[0] %}
{%q= c.Name %}:{%q= c.Values[rowIdx] %} {%q= c.Name %}:{%q= c.Values[rowIdx] %}
{% code columns = columns[1:] %} {% code columns = columns[1:] %}
{% for colIdx := range columns %} {% for colIdx := range columns %}
{% code c := &columns[colIdx] %} {% code
c := &columns[colIdx]
v := c.Values[rowIdx]
%}
{% if v == "" %}
{% continue %}
{% endif %}
,{%q= c.Name %}:{%q= c.Values[rowIdx] %} ,{%q= c.Name %}:{%q= c.Values[rowIdx] %}
{% endfor %} {% endfor %}
}{% newline %} }{% newline %}
{% endfunc %} {% endfunc %}
// JSONRows prints formatted rows // JSONRows prints formatted rows
@@ -23,7 +39,11 @@
{% return %} {% return %}
{% endif %} {% endif %}
{% for _, fields := range rows %} {% for _, fields := range rows %}
{ {% code fields = logstorage.SkipLeadingFieldsWithoutValues(fields) %}
{% if len(fields) == 0 %}
{% continue %}
{% endif %}
{
{% if len(fields) > 0 %} {% if len(fields) > 0 %}
{% code {% code
f := fields[0] f := fields[0]
@@ -31,10 +51,13 @@
%} %}
{%q= f.Name %}:{%q= f.Value %} {%q= f.Name %}:{%q= f.Value %}
{% for _, f := range fields %} {% for _, f := range fields %}
{% if f.Value == "" %}
{% continue %}
{% endif %}
,{%q= f.Name %}:{%q= f.Value %} ,{%q= f.Name %}:{%q= f.Value %}
{% endfor %} {% endfor %}
{% endif %} {% endif %}
}{% newline %} }{% newline %}
{% endfor %} {% endfor %}
{% endfunc %} {% endfunc %}
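Both template changes implement the same rule: fields with empty values are omitted from the rendered JSON rows. A standalone sketch of that filtering semantics (local field type, not the template code itself):

package main

import "fmt"

type field struct{ Name, Value string }

// nonEmptyFields drops fields with empty values, which is the combined
// effect of SkipLeadingFieldsWithoutValues plus the per-field continue above.
func nonEmptyFields(fields []field) []field {
	dst := make([]field, 0, len(fields))
	for _, f := range fields {
		if f.Value != "" {
			dst = append(dst, f)
		}
	}
	return dst
}

func main() {
	fmt.Println(nonEmptyFields([]field{{"@timestamp", ""}, {"_msg", "foobar"}}))
	// [{_msg foobar}]
}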


@@ -26,141 +26,176 @@ var (
//line app/vlselect/logsql/query_response.qtpl:8 //line app/vlselect/logsql/query_response.qtpl:8
func StreamJSONRow(qw422016 *qt422016.Writer, columns []logstorage.BlockColumn, rowIdx int) { func StreamJSONRow(qw422016 *qt422016.Writer, columns []logstorage.BlockColumn, rowIdx int) {
//line app/vlselect/logsql/query_response.qtpl:8
qw422016.N().S(`{`)
//line app/vlselect/logsql/query_response.qtpl:10 //line app/vlselect/logsql/query_response.qtpl:10
i := 0
for i < len(columns) && columns[i].Values[rowIdx] == "" {
i++
}
columns = columns[i:]
//line app/vlselect/logsql/query_response.qtpl:16
if len(columns) == 0 {
//line app/vlselect/logsql/query_response.qtpl:17
return
//line app/vlselect/logsql/query_response.qtpl:18
}
//line app/vlselect/logsql/query_response.qtpl:18
qw422016.N().S(`{`)
//line app/vlselect/logsql/query_response.qtpl:20
c := &columns[0] c := &columns[0]
//line app/vlselect/logsql/query_response.qtpl:11 //line app/vlselect/logsql/query_response.qtpl:21
qw422016.N().Q(c.Name) qw422016.N().Q(c.Name)
//line app/vlselect/logsql/query_response.qtpl:11 //line app/vlselect/logsql/query_response.qtpl:21
qw422016.N().S(`:`) qw422016.N().S(`:`)
//line app/vlselect/logsql/query_response.qtpl:11 //line app/vlselect/logsql/query_response.qtpl:21
qw422016.N().Q(c.Values[rowIdx]) qw422016.N().Q(c.Values[rowIdx])
//line app/vlselect/logsql/query_response.qtpl:12 //line app/vlselect/logsql/query_response.qtpl:22
columns = columns[1:] columns = columns[1:]
//line app/vlselect/logsql/query_response.qtpl:13 //line app/vlselect/logsql/query_response.qtpl:23
for colIdx := range columns { for colIdx := range columns {
//line app/vlselect/logsql/query_response.qtpl:14 //line app/vlselect/logsql/query_response.qtpl:25
c := &columns[colIdx] c := &columns[colIdx]
v := c.Values[rowIdx]
//line app/vlselect/logsql/query_response.qtpl:14 //line app/vlselect/logsql/query_response.qtpl:28
if v == "" {
//line app/vlselect/logsql/query_response.qtpl:29
continue
//line app/vlselect/logsql/query_response.qtpl:30
}
//line app/vlselect/logsql/query_response.qtpl:30
qw422016.N().S(`,`) qw422016.N().S(`,`)
//line app/vlselect/logsql/query_response.qtpl:15 //line app/vlselect/logsql/query_response.qtpl:31
qw422016.N().Q(c.Name) qw422016.N().Q(c.Name)
//line app/vlselect/logsql/query_response.qtpl:15 //line app/vlselect/logsql/query_response.qtpl:31
qw422016.N().S(`:`) qw422016.N().S(`:`)
//line app/vlselect/logsql/query_response.qtpl:15 //line app/vlselect/logsql/query_response.qtpl:31
qw422016.N().Q(c.Values[rowIdx]) qw422016.N().Q(c.Values[rowIdx])
//line app/vlselect/logsql/query_response.qtpl:16 //line app/vlselect/logsql/query_response.qtpl:32
} }
//line app/vlselect/logsql/query_response.qtpl:16 //line app/vlselect/logsql/query_response.qtpl:32
qw422016.N().S(`}`) qw422016.N().S(`}`)
//line app/vlselect/logsql/query_response.qtpl:17 //line app/vlselect/logsql/query_response.qtpl:33
qw422016.N().S(` qw422016.N().S(`
`) `)
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
} }
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
func WriteJSONRow(qq422016 qtio422016.Writer, columns []logstorage.BlockColumn, rowIdx int) { func WriteJSONRow(qq422016 qtio422016.Writer, columns []logstorage.BlockColumn, rowIdx int) {
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
qw422016 := qt422016.AcquireWriter(qq422016) qw422016 := qt422016.AcquireWriter(qq422016)
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
StreamJSONRow(qw422016, columns, rowIdx) StreamJSONRow(qw422016, columns, rowIdx)
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
qt422016.ReleaseWriter(qw422016) qt422016.ReleaseWriter(qw422016)
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
} }
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
func JSONRow(columns []logstorage.BlockColumn, rowIdx int) string { func JSONRow(columns []logstorage.BlockColumn, rowIdx int) string {
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
qb422016 := qt422016.AcquireByteBuffer() qb422016 := qt422016.AcquireByteBuffer()
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
WriteJSONRow(qb422016, columns, rowIdx) WriteJSONRow(qb422016, columns, rowIdx)
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
qs422016 := string(qb422016.B) qs422016 := string(qb422016.B)
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
qt422016.ReleaseByteBuffer(qb422016) qt422016.ReleaseByteBuffer(qb422016)
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
return qs422016 return qs422016
//line app/vlselect/logsql/query_response.qtpl:18 //line app/vlselect/logsql/query_response.qtpl:34
} }
// JSONRows prints formatted rows // JSONRows prints formatted rows
//line app/vlselect/logsql/query_response.qtpl:21 //line app/vlselect/logsql/query_response.qtpl:37
func StreamJSONRows(qw422016 *qt422016.Writer, rows [][]logstorage.Field) { func StreamJSONRows(qw422016 *qt422016.Writer, rows [][]logstorage.Field) {
//line app/vlselect/logsql/query_response.qtpl:22 //line app/vlselect/logsql/query_response.qtpl:38
if len(rows) == 0 { if len(rows) == 0 {
//line app/vlselect/logsql/query_response.qtpl:23 //line app/vlselect/logsql/query_response.qtpl:39
return return
//line app/vlselect/logsql/query_response.qtpl:24 //line app/vlselect/logsql/query_response.qtpl:40
} }
//line app/vlselect/logsql/query_response.qtpl:25 //line app/vlselect/logsql/query_response.qtpl:41
for _, fields := range rows { for _, fields := range rows {
//line app/vlselect/logsql/query_response.qtpl:25 //line app/vlselect/logsql/query_response.qtpl:42
fields = logstorage.SkipLeadingFieldsWithoutValues(fields)
//line app/vlselect/logsql/query_response.qtpl:43
if len(fields) == 0 {
//line app/vlselect/logsql/query_response.qtpl:44
continue
//line app/vlselect/logsql/query_response.qtpl:45
}
//line app/vlselect/logsql/query_response.qtpl:45
qw422016.N().S(`{`) qw422016.N().S(`{`)
//line app/vlselect/logsql/query_response.qtpl:27 //line app/vlselect/logsql/query_response.qtpl:47
if len(fields) > 0 { if len(fields) > 0 {
//line app/vlselect/logsql/query_response.qtpl:29 //line app/vlselect/logsql/query_response.qtpl:49
f := fields[0] f := fields[0]
fields = fields[1:] fields = fields[1:]
//line app/vlselect/logsql/query_response.qtpl:32 //line app/vlselect/logsql/query_response.qtpl:52
qw422016.N().Q(f.Name) qw422016.N().Q(f.Name)
//line app/vlselect/logsql/query_response.qtpl:32 //line app/vlselect/logsql/query_response.qtpl:52
qw422016.N().S(`:`) qw422016.N().S(`:`)
//line app/vlselect/logsql/query_response.qtpl:32 //line app/vlselect/logsql/query_response.qtpl:52
qw422016.N().Q(f.Value) qw422016.N().Q(f.Value)
//line app/vlselect/logsql/query_response.qtpl:33 //line app/vlselect/logsql/query_response.qtpl:53
for _, f := range fields { for _, f := range fields {
//line app/vlselect/logsql/query_response.qtpl:33 //line app/vlselect/logsql/query_response.qtpl:54
if f.Value == "" {
//line app/vlselect/logsql/query_response.qtpl:55
continue
//line app/vlselect/logsql/query_response.qtpl:56
}
//line app/vlselect/logsql/query_response.qtpl:56
qw422016.N().S(`,`) qw422016.N().S(`,`)
//line app/vlselect/logsql/query_response.qtpl:34 //line app/vlselect/logsql/query_response.qtpl:57
qw422016.N().Q(f.Name) qw422016.N().Q(f.Name)
//line app/vlselect/logsql/query_response.qtpl:34 //line app/vlselect/logsql/query_response.qtpl:57
qw422016.N().S(`:`) qw422016.N().S(`:`)
//line app/vlselect/logsql/query_response.qtpl:34 //line app/vlselect/logsql/query_response.qtpl:57
qw422016.N().Q(f.Value) qw422016.N().Q(f.Value)
//line app/vlselect/logsql/query_response.qtpl:35 //line app/vlselect/logsql/query_response.qtpl:58
} }
//line app/vlselect/logsql/query_response.qtpl:36 //line app/vlselect/logsql/query_response.qtpl:59
} }
//line app/vlselect/logsql/query_response.qtpl:36 //line app/vlselect/logsql/query_response.qtpl:59
qw422016.N().S(`}`) qw422016.N().S(`}`)
//line app/vlselect/logsql/query_response.qtpl:37 //line app/vlselect/logsql/query_response.qtpl:60
qw422016.N().S(` qw422016.N().S(`
`) `)
//line app/vlselect/logsql/query_response.qtpl:38 //line app/vlselect/logsql/query_response.qtpl:61
} }
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
} }
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
func WriteJSONRows(qq422016 qtio422016.Writer, rows [][]logstorage.Field) { func WriteJSONRows(qq422016 qtio422016.Writer, rows [][]logstorage.Field) {
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
qw422016 := qt422016.AcquireWriter(qq422016) qw422016 := qt422016.AcquireWriter(qq422016)
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
StreamJSONRows(qw422016, rows) StreamJSONRows(qw422016, rows)
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
qt422016.ReleaseWriter(qw422016) qt422016.ReleaseWriter(qw422016)
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
} }
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
func JSONRows(rows [][]logstorage.Field) string { func JSONRows(rows [][]logstorage.Field) string {
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
qb422016 := qt422016.AcquireByteBuffer() qb422016 := qt422016.AcquireByteBuffer()
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
WriteJSONRows(qb422016, rows) WriteJSONRows(qb422016, rows)
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
qs422016 := string(qb422016.B) qs422016 := string(qb422016.B)
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
qt422016.ReleaseByteBuffer(qb422016) qt422016.ReleaseByteBuffer(qb422016)
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
return qs422016 return qs422016
//line app/vlselect/logsql/query_response.qtpl:39 //line app/vlselect/logsql/query_response.qtpl:62
} }


@@ -1,13 +1,13 @@
{ {
"files": { "files": {
"main.css": "./static/css/main.cbbca000.css", "main.css": "./static/css/main.faf86aa5.css",
"main.js": "./static/js/main.3d2eb957.js", "main.js": "./static/js/main.2810cc52.js",
"static/js/685.f772060c.chunk.js": "./static/js/685.f772060c.chunk.js", "static/js/685.f772060c.chunk.js": "./static/js/685.f772060c.chunk.js",
"static/media/MetricsQL.md": "./static/media/MetricsQL.a00044c91d9781cf8557.md", "static/media/MetricsQL.md": "./static/media/MetricsQL.a00044c91d9781cf8557.md",
"index.html": "./index.html" "index.html": "./index.html"
}, },
"entrypoints": [ "entrypoints": [
"static/css/main.cbbca000.css", "static/css/main.faf86aa5.css",
"static/js/main.3d2eb957.js" "static/js/main.2810cc52.js"
] ]
} }


@@ -0,0 +1,5 @@
{
"license": {
"type": "opensource"
}
}


@@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore your log data with VictoriaLogs UI"/><link rel="manifest" href="./manifest.json"/><title>UI for VictoriaLogs</title><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaLogs"><meta name="twitter:site" content="@https://victoriametrics.com/products/victorialogs/"><meta name="twitter:description" content="Explore your log data with VictoriaLogs UI"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaLogs"><meta property="og:url" content="https://victoriametrics.com/products/victorialogs/"><meta property="og:description" content="Explore your log data with VictoriaLogs UI"><script defer="defer" src="./static/js/main.3d2eb957.js"></script><link href="./static/css/main.cbbca000.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html> <!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore your log data with VictoriaLogs UI"/><link rel="manifest" href="./manifest.json"/><title>UI for VictoriaLogs</title><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaLogs"><meta name="twitter:site" content="@https://victoriametrics.com/products/victorialogs/"><meta name="twitter:description" content="Explore your log data with VictoriaLogs UI"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaLogs"><meta property="og:url" content="https://victoriametrics.com/products/victorialogs/"><meta property="og:description" content="Explore your log data with VictoriaLogs UI"><script defer="defer" src="./static/js/main.2810cc52.js"></script><link href="./static/css/main.faf86aa5.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -18,12 +18,12 @@ import (
) )
var ( var (
retentionPeriod = flagutil.NewDuration("retentionPeriod", "7d", "Log entries with timestamps older than now-retentionPeriod are automatically deleted; "+ retentionPeriod = flagutil.NewRetentionDuration("retentionPeriod", "7d", "Log entries with timestamps older than now-retentionPeriod are automatically deleted; "+
"log entries with timestamps outside the retention are also rejected during data ingestion; the minimum supported retention is 1d (one day); "+ "log entries with timestamps outside the retention are also rejected during data ingestion; the minimum supported retention is 1d (one day); "+
"see https://docs.victoriametrics.com/victorialogs/#retention ; see also -retention.maxDiskSpaceUsageBytes") "see https://docs.victoriametrics.com/victorialogs/#retention ; see also -retention.maxDiskSpaceUsageBytes")
maxDiskSpaceUsageBytes = flagutil.NewBytes("retention.maxDiskSpaceUsageBytes", 0, "The maximum disk space usage at -storageDataPath before older per-day "+ maxDiskSpaceUsageBytes = flagutil.NewBytes("retention.maxDiskSpaceUsageBytes", 0, "The maximum disk space usage at -storageDataPath before older per-day "+
"partitions are automatically dropped; see https://docs.victoriametrics.com/victorialogs/#retention-by-disk-space-usage ; see also -retentionPeriod") "partitions are automatically dropped; see https://docs.victoriametrics.com/victorialogs/#retention-by-disk-space-usage ; see also -retentionPeriod")
futureRetention = flagutil.NewDuration("futureRetention", "2d", "Log entries with timestamps bigger than now+futureRetention are rejected during data ingestion; "+ futureRetention = flagutil.NewRetentionDuration("futureRetention", "2d", "Log entries with timestamps bigger than now+futureRetention are rejected during data ingestion; "+
"see https://docs.victoriametrics.com/victorialogs/#retention") "see https://docs.victoriametrics.com/victorialogs/#retention")
storageDataPath = flag.String("storageDataPath", "victoria-logs-data", "Path to directory where to store VictoriaLogs data; "+ storageDataPath = flag.String("storageDataPath", "victoria-logs-data", "Path to directory where to store VictoriaLogs data; "+
"see https://docs.victoriametrics.com/victorialogs/#storage") "see https://docs.victoriametrics.com/victorialogs/#storage")
@@ -37,6 +37,8 @@ var (
"see https://docs.victoriametrics.com/victorialogs/data-ingestion/ ; see also -logNewStreams") "see https://docs.victoriametrics.com/victorialogs/data-ingestion/ ; see also -logNewStreams")
minFreeDiskSpaceBytes = flagutil.NewBytes("storage.minFreeDiskSpaceBytes", 10e6, "The minimum free disk space at -storageDataPath after which "+ minFreeDiskSpaceBytes = flagutil.NewBytes("storage.minFreeDiskSpaceBytes", 10e6, "The minimum free disk space at -storageDataPath after which "+
"the storage stops accepting new data") "the storage stops accepting new data")
forceMergeAuthKey = flagutil.NewPassword("forceMergeAuthKey", "authKey, which must be passed in query string to /internal/force_merge pages. It overrides -httpAuth.*")
) )
// Init initializes vlstorage. // Init initializes vlstorage.
@@ -87,6 +89,28 @@ func Stop() {
strg = nil strg = nil
} }
// RequestHandler is a storage request handler.
func RequestHandler(w http.ResponseWriter, r *http.Request) bool {
path := r.URL.Path
if path == "/internal/force_merge" {
if !httpserver.CheckAuthFlag(w, r, forceMergeAuthKey) {
return true
}
// Run force merge in background
partitionNamePrefix := r.FormValue("partition_prefix")
go func() {
activeForceMerges.Inc()
defer activeForceMerges.Dec()
logger.Infof("forced merge for partition_prefix=%q has been started", partitionNamePrefix)
startTime := time.Now()
strg.MustForceMerge(partitionNamePrefix)
logger.Infof("forced merge for partition_prefix=%q has been successfully finished in %.3f seconds", partitionNamePrefix, time.Since(startTime).Seconds())
}()
return true
}
return false
}
var strg *logstorage.Storage var strg *logstorage.Storage
var storageMetrics *metrics.Set var storageMetrics *metrics.Set
@@ -205,3 +229,5 @@ func writeStorageMetrics(w io.Writer, strg *logstorage.Storage) {
metrics.WriteCounterUint64(w, `vl_rows_dropped_total{reason="too_big_timestamp"}`, ss.RowsDroppedTooBigTimestamp) metrics.WriteCounterUint64(w, `vl_rows_dropped_total{reason="too_big_timestamp"}`, ss.RowsDroppedTooBigTimestamp)
metrics.WriteCounterUint64(w, `vl_rows_dropped_total{reason="too_small_timestamp"}`, ss.RowsDroppedTooSmallTimestamp) metrics.WriteCounterUint64(w, `vl_rows_dropped_total{reason="too_small_timestamp"}`, ss.RowsDroppedTooSmallTimestamp)
} }
var activeForceMerges = metrics.NewCounter("vl_active_force_merges")
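A hedged example of triggering the /internal/force_merge handler added above on a local single-node instance; the partition prefix value is illustrative, and an authKey=... query parameter is needed when -forceMergeAuthKey is set:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// kicks off a background merge; the handler returns immediately
	resp, err := http.Get("http://localhost:9428/internal/force_merge?partition_prefix=20241021")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}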


@@ -36,7 +36,7 @@ var (
// //
// See https://github.com/influxdata/telegraf/tree/master/plugins/inputs/socket_listener/ // See https://github.com/influxdata/telegraf/tree/master/plugins/inputs/socket_listener/
func InsertHandlerForReader(at *auth.Token, r io.Reader, isGzipped bool) error { func InsertHandlerForReader(at *auth.Token, r io.Reader, isGzipped bool) error {
return stream.Parse(r, isGzipped, "", "", func(db string, rows []parser.Row) error { return stream.Parse(r, true, isGzipped, "", "", func(db string, rows []parser.Row) error {
return insertRows(at, db, rows, nil) return insertRows(at, db, rows, nil)
}) })
} }
@@ -50,11 +50,12 @@ func InsertHandlerForHTTP(at *auth.Token, req *http.Request) error {
return err return err
} }
isGzipped := req.Header.Get("Content-Encoding") == "gzip" isGzipped := req.Header.Get("Content-Encoding") == "gzip"
isStreamMode := req.Header.Get("Stream-Mode") == "1"
q := req.URL.Query() q := req.URL.Query()
precision := q.Get("precision") precision := q.Get("precision")
// Read db tag from https://docs.influxdata.com/influxdb/v1.7/tools/api/#write-http-endpoint // Read db tag from https://docs.influxdata.com/influxdb/v1.7/tools/api/#write-http-endpoint
db := q.Get("db") db := q.Get("db")
return stream.Parse(req.Body, isGzipped, precision, db, func(db string, rows []parser.Row) error { return stream.Parse(req.Body, isStreamMode, isGzipped, precision, db, func(db string, rows []parser.Row) error {
return insertRows(at, db, rows, extraLabels) return insertRows(at, db, rows, extraLabels)
}) })
} }
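A hedged sketch of opting in to the new streaming parse mode from a client; the endpoint, port and measurement assume a local single-node setup and are illustrative:

package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	body := strings.NewReader("cpu,host=host1 usage=12.5\n")
	req, err := http.NewRequest("POST", "http://localhost:8428/write?db=telegraf", body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Stream-Mode", "1") // the header checked in the diff above
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}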


@@ -35,7 +35,7 @@ var (
"By default, the rate limit is disabled. It can be useful for limiting load on remote storage when big amounts of buffered data "+ "By default, the rate limit is disabled. It can be useful for limiting load on remote storage when big amounts of buffered data "+
"is sent after temporary unavailability of the remote storage. See also -maxIngestionRate") "is sent after temporary unavailability of the remote storage. See also -maxIngestionRate")
sendTimeout = flagutil.NewArrayDuration("remoteWrite.sendTimeout", time.Minute, "Timeout for sending a single block of data to the corresponding -remoteWrite.url") sendTimeout = flagutil.NewArrayDuration("remoteWrite.sendTimeout", time.Minute, "Timeout for sending a single block of data to the corresponding -remoteWrite.url")
retryMinInterval = flagutil.NewArrayDuration("remoteWrite.retryMinInterval", time.Second, "The minimum delay between retry attempts to send a block of data to the corresponding -remoteWrite.url. Every next retry attempt will double the delay to prevent hammering of remote database. See also -remoteWrite.retryMaxInterval") retryMinInterval = flagutil.NewArrayDuration("remoteWrite.retryMinInterval", time.Second, "The minimum delay between retry attempts to send a block of data to the corresponding -remoteWrite.url. Every next retry attempt will double the delay to prevent hammering of remote database. See also -remoteWrite.retryMaxTime")
retryMaxTime = flagutil.NewArrayDuration("remoteWrite.retryMaxTime", time.Minute, "The max time spent on retry attempts to send a block of data to the corresponding -remoteWrite.url. Change this value if it is expected for -remoteWrite.url to be unreachable for more than -remoteWrite.retryMaxTime. See also -remoteWrite.retryMinInterval") retryMaxTime = flagutil.NewArrayDuration("remoteWrite.retryMaxTime", time.Minute, "The max time spent on retry attempts to send a block of data to the corresponding -remoteWrite.url. Change this value if it is expected for -remoteWrite.url to be unreachable for more than -remoteWrite.retryMaxTime. See also -remoteWrite.retryMinInterval")
proxyURL = flagutil.NewArrayString("remoteWrite.proxyURL", "Optional proxy URL for writing data to the corresponding -remoteWrite.url. "+ proxyURL = flagutil.NewArrayString("remoteWrite.proxyURL", "Optional proxy URL for writing data to the corresponding -remoteWrite.url. "+
"Supported proxies: http, https, socks5. Example: -remoteWrite.proxyURL=socks5://proxy:1234") "Supported proxies: http, https, socks5. Example: -remoteWrite.proxyURL=socks5://proxy:1234")


@ -24,7 +24,7 @@ var (
streamAggrGlobalDropInput = flag.Bool("streamAggr.dropInput", false, "Whether to drop all the input samples after the aggregation "+ streamAggrGlobalDropInput = flag.Bool("streamAggr.dropInput", false, "Whether to drop all the input samples after the aggregation "+
"with -remoteWrite.streamAggr.config. By default, only aggregates samples are dropped, while the remaining samples "+ "with -remoteWrite.streamAggr.config. By default, only aggregates samples are dropped, while the remaining samples "+
"are written to remote storages write. See also -streamAggr.keepInput and https://docs.victoriametrics.com/stream-aggregation/") "are written to remote storages write. See also -streamAggr.keepInput and https://docs.victoriametrics.com/stream-aggregation/")
streamAggrGlobalDedupInterval = flagutil.NewDuration("streamAggr.dedupInterval", "0s", "Input samples are de-duplicated with this interval on "+ streamAggrGlobalDedupInterval = flag.Duration("streamAggr.dedupInterval", 0, "Input samples are de-duplicated with this interval on "+
"aggregator before optional aggregation with -streamAggr.config . "+ "aggregator before optional aggregation with -streamAggr.config . "+
"See also -dedup.minScrapeInterval and https://docs.victoriametrics.com/stream-aggregation/#deduplication") "See also -dedup.minScrapeInterval and https://docs.victoriametrics.com/stream-aggregation/#deduplication")
streamAggrGlobalIgnoreOldSamples = flag.Bool("streamAggr.ignoreOldSamples", false, "Whether to ignore input samples with old timestamps outside the "+ streamAggrGlobalIgnoreOldSamples = flag.Bool("streamAggr.ignoreOldSamples", false, "Whether to ignore input samples with old timestamps outside the "+
@ -133,7 +133,7 @@ func initStreamAggrConfigGlobal() {
metrics.GetOrCreateCounter(fmt.Sprintf(`vmagent_streamaggr_config_reload_successful{path=%q}`, filePath)).Set(1) metrics.GetOrCreateCounter(fmt.Sprintf(`vmagent_streamaggr_config_reload_successful{path=%q}`, filePath)).Set(1)
metrics.GetOrCreateCounter(fmt.Sprintf(`vmagent_streamaggr_config_reload_success_timestamp_seconds{path=%q}`, filePath)).Set(fasttime.UnixTimestamp()) metrics.GetOrCreateCounter(fmt.Sprintf(`vmagent_streamaggr_config_reload_success_timestamp_seconds{path=%q}`, filePath)).Set(fasttime.UnixTimestamp())
} }
dedupInterval := streamAggrGlobalDedupInterval.Duration() dedupInterval := *streamAggrGlobalDedupInterval
if dedupInterval > 0 { if dedupInterval > 0 {
deduplicatorGlobal = streamaggr.NewDeduplicator(pushToRemoteStoragesTrackDropped, dedupInterval, *streamAggrGlobalDropInputLabels, "dedup-global") deduplicatorGlobal = streamaggr.NewDeduplicator(pushToRemoteStoragesTrackDropped, dedupInterval, *streamAggrGlobalDropInputLabels, "dedup-global")
} }
@ -202,7 +202,7 @@ func newStreamAggrConfigGlobal() (*streamaggr.Aggregators, error) {
} }
opts := &streamaggr.Options{ opts := &streamaggr.Options{
DedupInterval: streamAggrGlobalDedupInterval.Duration(), DedupInterval: *streamAggrGlobalDedupInterval,
DropInputLabels: *streamAggrGlobalDropInputLabels, DropInputLabels: *streamAggrGlobalDropInputLabels,
IgnoreOldSamples: *streamAggrGlobalIgnoreOldSamples, IgnoreOldSamples: *streamAggrGlobalIgnoreOldSamples,
IgnoreFirstIntervals: *streamAggrGlobalIgnoreFirstIntervals, IgnoreFirstIntervals: *streamAggrGlobalIgnoreFirstIntervals,


@ -43,18 +43,33 @@ func httpWrite(address string, r io.Reader) {
// writeInputSeries send input series to vmstorage and flush them // writeInputSeries send input series to vmstorage and flush them
func writeInputSeries(input []series, interval *promutils.Duration, startStamp time.Time, dst string) error { func writeInputSeries(input []series, interval *promutils.Duration, startStamp time.Time, dst string) error {
r := testutil.WriteRequest{} r := testutil.WriteRequest{}
var err error
r.Timeseries, err = parseInputSeries(input, interval, startStamp)
if err != nil {
return err
}
data := testutil.Compress(r)
// write input series to vm
httpWrite(dst, bytes.NewBuffer(data))
vmstorage.Storage.DebugFlush()
return nil
}
func parseInputSeries(input []series, interval *promutils.Duration, startStamp time.Time) ([]testutil.TimeSeries, error) {
var res []testutil.TimeSeries
for _, data := range input { for _, data := range input {
expr, err := metricsql.Parse(data.Series) expr, err := metricsql.Parse(data.Series)
if err != nil { if err != nil {
return fmt.Errorf("failed to parse series %s: %v", data.Series, err) return res, fmt.Errorf("failed to parse series %s: %v", data.Series, err)
} }
promvals, err := parseInputValue(data.Values, true) promvals, err := parseInputValue(data.Values, true)
if err != nil { if err != nil {
return fmt.Errorf("failed to parse input series value %s: %v", data.Values, err) return res, fmt.Errorf("failed to parse input series value %s: %v", data.Values, err)
} }
metricExpr, ok := expr.(*metricsql.MetricExpr) metricExpr, ok := expr.(*metricsql.MetricExpr)
if !ok { if !ok || len(metricExpr.LabelFilterss) != 1 {
return fmt.Errorf("failed to parse series %s to metric expr: %v", data.Series, err) return res, fmt.Errorf("got invalid input series %s: %v", data.Series, err)
} }
samples := make([]testutil.Sample, 0, len(promvals)) samples := make([]testutil.Sample, 0, len(promvals))
ts := startStamp ts := startStamp
@ -71,14 +86,9 @@ func writeInputSeries(input []series, interval *promutils.Duration, startStamp t
for _, filter := range metricExpr.LabelFilterss[0] { for _, filter := range metricExpr.LabelFilterss[0] {
ls = append(ls, testutil.Label{Name: filter.Label, Value: filter.Value}) ls = append(ls, testutil.Label{Name: filter.Label, Value: filter.Value})
} }
r.Timeseries = append(r.Timeseries, testutil.TimeSeries{Labels: ls, Samples: samples}) res = append(res, testutil.TimeSeries{Labels: ls, Samples: samples})
} }
return res, nil
data := testutil.Compress(r)
// write input series to vm
httpWrite(dst, bytes.NewBuffer(data))
vmstorage.Storage.DebugFlush()
return nil
} }
// parseInputValue support input like "1", "1+1x1 _ -4 3+20x1", see more examples in test. // parseInputValue support input like "1", "1+1x1 _ -4 3+20x1", see more examples in test.


@ -2,8 +2,10 @@ package unittest
import ( import (
"testing" "testing"
"time"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal" "github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promutils"
) )
func TestParseInputValue_Failure(t *testing.T) { func TestParseInputValue_Failure(t *testing.T) {
@ -43,7 +45,7 @@ func TestParseInputValue_Success(t *testing.T) {
if decimal.IsStaleNaN(outputExpected[i].Value) && decimal.IsStaleNaN(output[i].Value) { if decimal.IsStaleNaN(outputExpected[i].Value) && decimal.IsStaleNaN(output[i].Value) {
continue continue
} }
t.Fatalf("unexpeccted Value field in the output\ngot\n%v\nwant\n%v", output, outputExpected) t.Fatalf("unexpected Value field in the output\ngot\n%v\nwant\n%v", output, outputExpected)
} }
} }
} }
@ -64,3 +66,34 @@ func TestParseInputValue_Success(t *testing.T) {
f("1+1x1 _ -4 stale 3+20x1", []sequenceValue{{Value: 1}, {Value: 2}, {Omitted: true}, {Value: -4}, {Value: decimal.StaleNaN}, {Value: 3}, {Value: 23}}) f("1+1x1 _ -4 stale 3+20x1", []sequenceValue{{Value: 1}, {Value: 2}, {Omitted: true}, {Value: -4}, {Value: decimal.StaleNaN}, {Value: 3}, {Value: 23}})
} }
func TestParseInputSeries_Success(t *testing.T) {
f := func(input []series) {
t.Helper()
var interval promutils.Duration
_, err := parseInputSeries(input, &interval, time.Now())
if err != nil {
t.Fatalf("expect to see no error: %v", err)
}
}
f([]series{{Series: "test", Values: "1"}})
f([]series{{Series: "test{}", Values: "1"}})
f([]series{{Series: "test{env=\"prod\",job=\"a\" }", Values: "1"}})
f([]series{{Series: "{__name__=\"test\",env=\"prod\",job=\"a\" }", Values: "1"}})
}
func TestParseInputSeries_Fail(t *testing.T) {
f := func(input []series) {
t.Helper()
var interval promutils.Duration
_, err := parseInputSeries(input, &interval, time.Now())
if err == nil {
t.Fatalf("expect to see error: %v", err)
}
}
f([]series{{Series: "", Values: "1"}})
f([]series{{Series: "{}", Values: "1"}})
f([]series{{Series: "{env=\"prod\",job=\"a\" or env=\"dev\",job=\"b\"}", Values: "1"}})
}
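For reference, the Values strings above use the Prometheus-style expanding notation exercised in TestParseInputValue_Success: a+bxn expands into n+1 samples starting at a with step b, _ omits a sample, and stale injects a staleness marker, so "1+1x1 _ -4 stale 3+20x1" expands to 1, 2, (omitted), -4, StaleNaN, 3, 23.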


@ -57,16 +57,18 @@ Outer:
continue Outer continue Outer
} }
metricsqlMetricExpr, ok := metricsqlExpr.(*metricsql.MetricExpr) metricsqlMetricExpr, ok := metricsqlExpr.(*metricsql.MetricExpr)
if !ok { if !ok || len(metricsqlMetricExpr.LabelFilterss) > 1 {
checkErrs = append(checkErrs, fmt.Errorf("\n expr: %q, time: %s, err: %v", mt.Expr, checkErrs = append(checkErrs, fmt.Errorf("\n expr: %q, time: %s, err: %v", mt.Expr,
mt.EvalTime.Duration().String(), fmt.Errorf("got unsupported metricsql type"))) mt.EvalTime.Duration().String(), fmt.Errorf("got invalid exp_samples: %q", s.Labels)))
continue Outer continue Outer
} }
for _, l := range metricsqlMetricExpr.LabelFilterss[0] { if len(metricsqlMetricExpr.LabelFilterss) > 0 {
expLb = append(expLb, datasource.Label{ for _, l := range metricsqlMetricExpr.LabelFilterss[0] {
Name: l.Label, expLb = append(expLb, datasource.Label{
Value: l.Value, Name: l.Label,
}) Value: l.Value,
})
}
} }
} }
sort.Slice(expLb, func(i, j int) bool { sort.Slice(expLb, func(i, j int) bool {


@ -250,7 +250,7 @@ checkCheck:
if readyCheckFunc() { if readyCheckFunc() {
break checkCheck break checkCheck
} }
time.Sleep(3 * time.Second) time.Sleep(100 * time.Millisecond)
} }
} }
} }


@ -1,19 +1,20 @@
package config package config
import ( import (
"bytes"
"crypto/md5" "crypto/md5"
"fmt" "fmt"
"hash/fnv" "hash/fnv"
"io"
"net/url" "net/url"
"sort" "sort"
"strings" "strings"
"gopkg.in/yaml.v2"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/config/log" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/config/log"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/utils" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/utils"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/envtemplate" "github.com/VictoriaMetrics/VictoriaMetrics/lib/envtemplate"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promutils" "github.com/VictoriaMetrics/VictoriaMetrics/lib/promutils"
"gopkg.in/yaml.v2"
) )
// Group contains list of Rules grouped into // Group contains list of Rules grouped into
@ -298,16 +299,30 @@ func parseConfig(data []byte) ([]Group, error) {
if err != nil { if err != nil {
return nil, fmt.Errorf("cannot expand environment vars: %w", err) return nil, fmt.Errorf("cannot expand environment vars: %w", err)
} }
g := struct {
var result []Group
type cfgFile struct {
Groups []Group `yaml:"groups"` Groups []Group `yaml:"groups"`
// Catches all undefined fields and must be empty after parsing. // Catches all undefined fields and must be empty after parsing.
XXX map[string]any `yaml:",inline"` XXX map[string]any `yaml:",inline"`
}{}
err = yaml.Unmarshal(data, &g)
if err != nil {
return nil, err
} }
return g.Groups, checkOverflow(g.XXX, "config")
decoder := yaml.NewDecoder(bytes.NewReader(data))
for {
var cf cfgFile
if err = decoder.Decode(&cf); err != nil {
if err == io.EOF { // EOF indicates no more documents to read
break
}
return nil, err
}
if err = checkOverflow(cf.XXX, "config"); err != nil {
return nil, err
}
result = append(result, cf.Groups...)
}
return result, nil
} }
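Swapping the single yaml.Unmarshal for a yaml.NewDecoder loop is what enables "---"-separated multi-document rule files: Decode is called once per document until it reports io.EOF. A self-contained sketch of the same pattern:

    package main

    import (
        "bytes"
        "fmt"
        "io"

        "gopkg.in/yaml.v2"
    )

    func main() {
        data := []byte("groups:\n- name: foo\n---\ngroups:\n- name: bar\n")
        var docs []map[string]any
        dec := yaml.NewDecoder(bytes.NewReader(data))
        for {
            var doc map[string]any
            if err := dec.Decode(&doc); err != nil {
                if err == io.EOF { // no more documents to read
                    break
                }
                panic(err)
            }
            docs = append(docs, doc)
        }
        fmt.Println(len(docs)) // 2
    }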
func checkOverflow(m map[string]any, ctx string) error { func checkOverflow(m map[string]any, ctx string) error {


@ -9,11 +9,10 @@ import (
"testing" "testing"
"time" "time"
"gopkg.in/yaml.v2"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/notifier" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/notifier"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/templates" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/templates"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promutils" "github.com/VictoriaMetrics/VictoriaMetrics/lib/promutils"
"gopkg.in/yaml.v2"
) )
func TestMain(m *testing.M) { func TestMain(m *testing.M) {
@ -40,6 +39,34 @@ groups:
w.Write([]byte(` w.Write([]byte(`
groups: groups:
- name: TestGroup - name: TestGroup
rules:
- record: conns
expr: max(vm_tcplistener_conns)`))
})
mux.HandleFunc("/good-multi-doc", func(w http.ResponseWriter, _ *http.Request) {
w.Write([]byte(`
groups:
- name: foo
rules:
- record: conns
expr: max(vm_tcplistener_conns)
---
groups:
- name: bar
rules:
- record: conns
expr: max(vm_tcplistener_conns)`))
})
mux.HandleFunc("/bad-multi-doc", func(w http.ResponseWriter, _ *http.Request) {
w.Write([]byte(`
bad_field:
- name: foo
rules:
- record: conns
expr: max(vm_tcplistener_conns)
---
groups:
- name: bar
rules: rules:
- record: conns - record: conns
expr: max(vm_tcplistener_conns)`)) expr: max(vm_tcplistener_conns)`))
@ -48,13 +75,23 @@ groups:
srv := httptest.NewServer(mux) srv := httptest.NewServer(mux)
defer srv.Close() defer srv.Close()
if _, err := Parse([]string{srv.URL + "/good-alert", srv.URL + "/good-rr"}, notifier.ValidateTemplates, true); err != nil { f := func(urls []string, expErr bool) {
t.Fatalf("error parsing URLs %s", err) for i, u := range urls {
urls[i] = srv.URL + u
}
_, err := Parse(urls, notifier.ValidateTemplates, true)
if err != nil && !expErr {
t.Fatalf("error parsing URLs %s", err)
}
if err == nil && expErr {
t.Fatalf("expecting error parsing URLs but got none")
}
} }
if _, err := Parse([]string{srv.URL + "/bad"}, notifier.ValidateTemplates, true); err == nil { f([]string{"/good-alert", "/good-rr", "/good-multi-doc"}, false)
t.Fatalf("expected parsing error: %s", err) f([]string{"/bad"}, true)
} f([]string{"/bad-multi-doc"}, true)
f([]string{"/good-alert", "/bad"}, true)
} }
func TestParse_Success(t *testing.T) { func TestParse_Success(t *testing.T) {
@ -86,6 +123,8 @@ func TestParse_Failure(t *testing.T) {
f([]string{"testdata/dir/rules4-bad.rules"}, "either `record` or `alert` must be set") f([]string{"testdata/dir/rules4-bad.rules"}, "either `record` or `alert` must be set")
f([]string{"testdata/rules/rules1-bad.rules"}, "bad graphite expr") f([]string{"testdata/rules/rules1-bad.rules"}, "bad graphite expr")
f([]string{"testdata/dir/rules6-bad.rules"}, "missing ':' in header") f([]string{"testdata/dir/rules6-bad.rules"}, "missing ':' in header")
f([]string{"testdata/rules/rules-multi-doc-bad.rules"}, "unknown fields")
f([]string{"testdata/rules/rules-multi-doc-duplicates-bad.rules"}, "duplicate")
f([]string{"http://unreachable-url"}, "failed to") f([]string{"http://unreachable-url"}, "failed to")
} }


@ -0,0 +1,29 @@
groups:
- name: groupTest
rules:
- alert: VMRows
for: 1ms
expr: vm_rows > 0
labels:
label: bar
host: "{{ $labels.instance }}"
annotations:
summary: "{{ $value }}"
invalid-field-1: invalid-value-1
invalid-field-2: invalid-value-2
---
groups:
- name: TestGroup
interval: 2s
concurrency: 2
type: graphite
rules:
- alert: Conns
expr: filterSeries(sumSeries(host.receiver.interface.cons),'last','>', 500)
for: 3m
annotations:
summary: Too high connection number for {{$labels.instance}}
description: "It is {{ $value }} connections for {{$labels.instance}}"
invalid-field-2: invalid-value-2
invalid-field-3: invalid-value-3


@ -0,0 +1,11 @@
groups:
- name: foo
rules:
- alert: VMRows
expr: vm_rows > 0
---
groups:
- name: foo
rules:
- alert: VMRows
expr: vm_rows > 0


@ -0,0 +1,15 @@
---
groups:
- name: groupTest
rules:
- alert: VMRows
for: 1ms
expr: vm_rows > 0
labels:
label: bar
host: "{{ $labels.instance }}"
annotations:
summary: "{{ $value }}"
---
groups:


@ -0,0 +1,46 @@
---
groups:
- name: groupTest
rules:
- alert: VMRows
for: 1ms
expr: vm_rows > 0
labels:
label: bar
host: "{{ $labels.instance }}"
annotations:
summary: "{{ $value }}"
- name: groupTest-2
rules:
- alert: VMRows-2
for: 1ms
expr: vm_rows_2 > 0
labels:
label: bar2
host: "{{ $labels.instance }}"
annotations:
summary: "\n markdown result is : \n---\n # header\n body: \n text \n----\n"
---
groups:
- name: groupTest-3
rules:
- alert: VMRows-3
for: 1ms
expr: vm_rows_3 > 0
labels:
label: bar_3
host: "{{ $labels.instance }}"
annotations:
summary: "{{ $value }}"
- name: groupTest-4
rules:
- alert: VMRows-4
for: 1ms
expr: vm_rows_4 > 0
labels:
label: bar4
host: "{{ $labels.instance }}"
annotations:
summary: "{{ $value }}"
---
groups:


@ -31,14 +31,14 @@ import (
) )
var ( var (
rulePath = flagutil.NewArrayString("rule", `Path to the files or http url with alerting and/or recording rules. rulePath = flagutil.NewArrayString("rule", `Path to the files or http url with alerting and/or recording rules in YAML format.
Supports hierarchical patterns and regexps. Supports hierarchical patterns and regexps.
Examples: Examples:
-rule="/path/to/file". Path to a single file with alerting rules. -rule="/path/to/file". Path to a single file with alerting rules.
-rule="http://<some-server-addr>/path/to/rules". HTTP URL to a page with alerting rules. -rule="http://<some-server-addr>/path/to/rules". HTTP URL to a page with alerting rules.
-rule="dir/*.yaml" -rule="/*.yaml" -rule="gcs://vmalert-rules/tenant_%{TENANT_ID}/prod". -rule="dir/*.yaml" -rule="/*.yaml" -rule="gcs://vmalert-rules/tenant_%{TENANT_ID}/prod".
-rule="dir/**/*.yaml". Includes all the .yaml files in "dir" subfolders recursively. -rule="dir/**/*.yaml". Includes all the .yaml files in "dir" subfolders recursively.
Rule files may contain %{ENV_VAR} placeholders, which are substituted by the corresponding env vars. Rule files support multi-document YAML. Files may contain %{ENV_VAR} placeholders, which are substituted by the corresponding env vars.
Enterprise version of vmalert supports S3 and GCS paths to rules. Enterprise version of vmalert supports S3 and GCS paths to rules.
For example: gs://bucket/path/to/rules, s3://bucket/path/to/rules For example: gs://bucket/path/to/rules, s3://bucket/path/to/rules


@ -33,7 +33,7 @@ const (
var ( var (
disablePathAppend = flag.Bool("remoteWrite.disablePathAppend", false, "Whether to disable automatic appending of '/api/v1/write' path to the configured -remoteWrite.url.") disablePathAppend = flag.Bool("remoteWrite.disablePathAppend", false, "Whether to disable automatic appending of '/api/v1/write' path to the configured -remoteWrite.url.")
sendTimeout = flag.Duration("remoteWrite.sendTimeout", 30*time.Second, "Timeout for sending data to the configured -remoteWrite.url.") sendTimeout = flag.Duration("remoteWrite.sendTimeout", 30*time.Second, "Timeout for sending data to the configured -remoteWrite.url.")
retryMinInterval = flag.Duration("remoteWrite.retryMinInterval", time.Second, "The minimum delay between retry attempts. Every next retry attempt will double the delay to prevent hammering of remote database. See also -remoteWrite.retryMaxInterval") retryMinInterval = flag.Duration("remoteWrite.retryMinInterval", time.Second, "The minimum delay between retry attempts. Every next retry attempt will double the delay to prevent hammering of remote database. See also -remoteWrite.retryMaxTime")
retryMaxTime = flag.Duration("remoteWrite.retryMaxTime", time.Second*30, "The max time spent on retry attempts for the failed remote-write request. Change this value if it is expected for remoteWrite.url to be unreachable for more than -remoteWrite.retryMaxTime. See also -remoteWrite.retryMinInterval") retryMaxTime = flag.Duration("remoteWrite.retryMaxTime", time.Second*30, "The max time spent on retry attempts for the failed remote-write request. Change this value if it is expected for remoteWrite.url to be unreachable for more than -remoteWrite.retryMaxTime. See also -remoteWrite.retryMinInterval")
) )


@ -324,14 +324,28 @@ func (g *Group) Start(ctx context.Context, nts func() []notifier.Notifier, rw re
g.infof("will start in %v", sleepBeforeStart) g.infof("will start in %v", sleepBeforeStart)
sleepTimer := time.NewTimer(sleepBeforeStart) sleepTimer := time.NewTimer(sleepBeforeStart)
select { randSleep:
case <-ctx.Done(): for {
sleepTimer.Stop() select {
return case <-ctx.Done():
case <-g.doneCh: sleepTimer.Stop()
sleepTimer.Stop() return
return case <-g.doneCh:
case <-sleepTimer.C: sleepTimer.Stop()
return
case ng := <-g.updateCh:
g.mu.Lock()
err := g.updateWith(ng)
if err != nil {
logger.Errorf("group %q: failed to update: %s", g.Name, err)
g.mu.Unlock()
continue
}
g.mu.Unlock()
g.infof("reload successfully")
case <-sleepTimer.C:
break randSleep
}
} }
evalTS = evalTS.Add(sleepBeforeStart) evalTS = evalTS.Add(sleepBeforeStart)
} }
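The new labeled loop mirrors the group's main select: while waiting out the initial random sleep, the group now also drains updateCh and applies configuration updates immediately, so a hot-reload no longer waits for the sleep (up to a full evaluation interval) to elapse before taking effect.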


@ -175,6 +175,74 @@ func TestUpdateWith(t *testing.T) {
}) })
} }
func TestUpdateDuringRandSleep(t *testing.T) {
// enable rand sleep to test group update during sleep
SkipRandSleepOnGroupStart = false
defer func() {
SkipRandSleepOnGroupStart = true
}()
rule := AlertingRule{
Name: "jobDown",
Expr: "up==0",
Labels: map[string]string{
"foo": "bar",
},
}
g := &Group{
Name: "test",
Rules: []Rule{
&rule,
},
// big interval ensures big enough randSleep during start process
Interval: 100 * time.Hour,
updateCh: make(chan *Group),
}
go g.Start(context.Background(), nil, nil, nil)
rule1 := AlertingRule{
Name: "jobDown",
Expr: "up{job=\"vmagent\"}==0",
Labels: map[string]string{
"foo": "bar",
},
}
g1 := &Group{
Rules: []Rule{
&rule1,
},
}
g.updateCh <- g1
time.Sleep(10 * time.Millisecond)
g.mu.RLock()
if g.Rules[0].(*AlertingRule).Expr != "up{job=\"vmagent\"}==0" {
t.Fatalf("expected to have updated rule expr")
}
g.mu.RUnlock()
rule2 := AlertingRule{
Name: "jobDown",
Expr: "up{job=\"vmagent\"}==0",
Labels: map[string]string{
"foo": "bar",
"baz": "qux",
},
}
g2 := &Group{
Rules: []Rule{
&rule2,
},
}
g.updateCh <- g2
time.Sleep(10 * time.Millisecond)
g.mu.RLock()
if len(g.Rules[0].(*AlertingRule).Labels) != 2 {
t.Fatalf("expected to have updated labels")
}
g.mu.RUnlock()
g.Close()
}
func TestGroupStart(t *testing.T) { func TestGroupStart(t *testing.T) {
const ( const (
rules = ` rules = `


@ -158,7 +158,7 @@ func (op *otsdbProcessor) do(s queryObj) error {
if len(data.Timestamps) < 1 || len(data.Values) < 1 { if len(data.Timestamps) < 1 || len(data.Values) < 1 {
return nil return nil
} }
labels := make([]vm.LabelPair, len(data.Tags)) labels := make([]vm.LabelPair, 0, len(data.Tags))
for k, v := range data.Tags { for k, v := range data.Tags {
labels = append(labels, vm.LabelPair{Name: k, Value: v}) labels = append(labels, vm.LabelPair{Name: k, Value: v})
} }
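The one-line change above fixes a classic Go slice bug: make([]T, n) creates n zero-valued elements, so appending n more yields a slice of length 2n whose first half is empty label pairs. Allocating with make([]T, 0, n) keeps the length at zero while preserving the capacity hint. A minimal demonstration:

    package main

    import "fmt"

    type LabelPair struct{ Name, Value string }

    func main() {
        tags := map[string]string{"host": "a", "dc": "b"}

        bad := make([]LabelPair, len(tags)) // length 2: already holds two zero-valued pairs
        for k, v := range tags {
            bad = append(bad, LabelPair{Name: k, Value: v})
        }
        fmt.Println(len(bad)) // 4: two empty pairs followed by the real ones

        good := make([]LabelPair, 0, len(tags)) // length 0, capacity 2
        for k, v := range tags {
            good = append(good, LabelPair{Name: k, Value: v})
        }
        fmt.Println(len(good)) // 2
    }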


@ -34,7 +34,7 @@ var (
// //
// See https://github.com/influxdata/telegraf/tree/master/plugins/inputs/socket_listener/ // See https://github.com/influxdata/telegraf/tree/master/plugins/inputs/socket_listener/
func InsertHandlerForReader(r io.Reader) error { func InsertHandlerForReader(r io.Reader) error {
return stream.Parse(r, false, "", "", func(db string, rows []parser.Row) error { return stream.Parse(r, true, false, "", "", func(db string, rows []parser.Row) error {
return insertRows(db, rows, nil) return insertRows(db, rows, nil)
}) })
} }
@ -48,11 +48,12 @@ func InsertHandlerForHTTP(req *http.Request) error {
return err return err
} }
isGzipped := req.Header.Get("Content-Encoding") == "gzip" isGzipped := req.Header.Get("Content-Encoding") == "gzip"
isStreamMode := req.Header.Get("Stream-Mode") == "1"
q := req.URL.Query() q := req.URL.Query()
precision := q.Get("precision") precision := q.Get("precision")
// Read db tag from https://docs.influxdata.com/influxdb/v1.7/tools/api/#write-http-endpoint // Read db tag from https://docs.influxdata.com/influxdb/v1.7/tools/api/#write-http-endpoint
db := q.Get("db") db := q.Get("db")
return stream.Parse(req.Body, isGzipped, precision, db, func(db string, rows []parser.Row) error { return stream.Parse(req.Body, isStreamMode, isGzipped, precision, db, func(db string, rows []parser.Row) error {
return insertRows(db, rows, extraLabels) return insertRows(db, rows, extraLabels)
}) })
} }


@ -60,6 +60,7 @@ func Init() {
fs.RemoveDirContents(tmpDirPath) fs.RemoveDirContents(tmpDirPath)
netstorage.InitTmpBlocksDir(tmpDirPath) netstorage.InitTmpBlocksDir(tmpDirPath)
promql.InitRollupResultCache(*vmstorage.DataPath + "/cache/rollupResult") promql.InitRollupResultCache(*vmstorage.DataPath + "/cache/rollupResult")
prometheus.InitMaxUniqueTimeseries(*maxConcurrentRequests)
concurrencyLimitCh = make(chan struct{}, *maxConcurrentRequests) concurrencyLimitCh = make(chan struct{}, *maxConcurrentRequests)
initVMAlertProxy() initVMAlertProxy()
@ -82,6 +83,9 @@ var (
_ = metrics.NewGauge(`vm_concurrent_select_current`, func() float64 { _ = metrics.NewGauge(`vm_concurrent_select_current`, func() float64 {
return float64(len(concurrencyLimitCh)) return float64(len(concurrencyLimitCh))
}) })
_ = metrics.NewGauge(`vm_search_max_unique_timeseries`, func() float64 {
return float64(prometheus.GetMaxUniqueTimeSeries())
})
) )
//go:embed vmui //go:embed vmui


@ -12,6 +12,10 @@ import (
"sync/atomic" "sync/atomic"
"time" "time"
"github.com/VictoriaMetrics/metrics"
"github.com/VictoriaMetrics/metricsql"
"github.com/valyala/fastjson/fastfloat"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/netstorage" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/netstorage"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/promql" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/promql"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/querystats" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/querystats"
@ -23,11 +27,10 @@ import (
"github.com/VictoriaMetrics/VictoriaMetrics/lib/flagutil" "github.com/VictoriaMetrics/VictoriaMetrics/lib/flagutil"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/httpserver" "github.com/VictoriaMetrics/VictoriaMetrics/lib/httpserver"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/httputils" "github.com/VictoriaMetrics/VictoriaMetrics/lib/httputils"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/memory"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/querytracer" "github.com/VictoriaMetrics/VictoriaMetrics/lib/querytracer"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/storage" "github.com/VictoriaMetrics/VictoriaMetrics/lib/storage"
"github.com/VictoriaMetrics/metrics"
"github.com/VictoriaMetrics/metricsql"
"github.com/valyala/fastjson/fastfloat"
) )
var ( var (
@ -47,7 +50,8 @@ var (
maxStepForPointsAdjustment = flag.Duration("search.maxStepForPointsAdjustment", time.Minute, "The maximum step when /api/v1/query_range handler adjusts "+ maxStepForPointsAdjustment = flag.Duration("search.maxStepForPointsAdjustment", time.Minute, "The maximum step when /api/v1/query_range handler adjusts "+
"points with timestamps closer than -search.latencyOffset to the current time. The adjustment is needed because such points may contain incomplete data") "points with timestamps closer than -search.latencyOffset to the current time. The adjustment is needed because such points may contain incomplete data")
maxUniqueTimeseries = flag.Int("search.maxUniqueTimeseries", 300e3, "The maximum number of unique time series, which can be selected during /api/v1/query and /api/v1/query_range queries. This option allows limiting memory usage") maxUniqueTimeseries = flag.Int("search.maxUniqueTimeseries", 0, "The maximum number of unique time series, which can be selected during /api/v1/query and /api/v1/query_range queries. This option allows limiting memory usage. "+
"When set to zero, the limit is automatically calculated based on -search.maxConcurrentRequests (inversely proportional) and memory available to the process (proportional).")
maxFederateSeries = flag.Int("search.maxFederateSeries", 1e6, "The maximum number of time series, which can be returned from /federate. This option allows limiting memory usage") maxFederateSeries = flag.Int("search.maxFederateSeries", 1e6, "The maximum number of time series, which can be returned from /federate. This option allows limiting memory usage")
maxExportSeries = flag.Int("search.maxExportSeries", 10e6, "The maximum number of time series, which can be returned from /api/v1/export* APIs. This option allows limiting memory usage") maxExportSeries = flag.Int("search.maxExportSeries", 10e6, "The maximum number of time series, which can be returned from /api/v1/export* APIs. This option allows limiting memory usage")
maxTSDBStatusSeries = flag.Int("search.maxTSDBStatusSeries", 10e6, "The maximum number of time series, which can be processed during the call to /api/v1/status/tsdb. This option allows limiting memory usage") maxTSDBStatusSeries = flag.Int("search.maxTSDBStatusSeries", 10e6, "The maximum number of time series, which can be processed during the call to /api/v1/status/tsdb. This option allows limiting memory usage")
@ -792,7 +796,7 @@ func QueryHandler(qt *querytracer.Tracer, startTime time.Time, w http.ResponseWr
End: start, End: start,
Step: step, Step: step,
MaxPointsPerSeries: *maxPointsPerTimeseries, MaxPointsPerSeries: *maxPointsPerTimeseries,
MaxSeries: *maxUniqueTimeseries, MaxSeries: GetMaxUniqueTimeSeries(),
QuotedRemoteAddr: httpserver.GetQuotedRemoteAddr(r), QuotedRemoteAddr: httpserver.GetQuotedRemoteAddr(r),
Deadline: deadline, Deadline: deadline,
MayCache: mayCache, MayCache: mayCache,
@ -900,7 +904,7 @@ func queryRangeHandler(qt *querytracer.Tracer, startTime time.Time, w http.Respo
End: end, End: end,
Step: step, Step: step,
MaxPointsPerSeries: *maxPointsPerTimeseries, MaxPointsPerSeries: *maxPointsPerTimeseries,
MaxSeries: *maxUniqueTimeseries, MaxSeries: GetMaxUniqueTimeSeries(),
QuotedRemoteAddr: httpserver.GetQuotedRemoteAddr(r), QuotedRemoteAddr: httpserver.GetQuotedRemoteAddr(r),
Deadline: deadline, Deadline: deadline,
MayCache: mayCache, MayCache: mayCache,
@ -1246,3 +1250,40 @@ func (sw *scalableWriter) flush() error {
}) })
return sw.bw.Flush() return sw.bw.Flush()
} }
var (
maxUniqueTimeseriesValueOnce sync.Once
maxUniqueTimeseriesValue int
)
// InitMaxUniqueTimeseries initializes the max unique time series limit based on the available resources.
// The calculation is split into calculateMaxUniqueTimeSeriesForResource for unit testing.
func InitMaxUniqueTimeseries(maxConcurrentRequests int) {
maxUniqueTimeseriesValueOnce.Do(func() {
maxUniqueTimeseriesValue = *maxUniqueTimeseries
if maxUniqueTimeseriesValue <= 0 {
maxUniqueTimeseriesValue = calculateMaxUniqueTimeSeriesForResource(maxConcurrentRequests, memory.Remaining())
}
})
}
// calculateMaxUniqueTimeSeriesForResource calculates the max unique time series limit based on the available resources.
func calculateMaxUniqueTimeSeriesForResource(maxConcurrentRequests, remainingMemory int) int {
if maxConcurrentRequests <= 0 {
// This line should NOT be reached unless the user has set an incorrect `search.maxConcurrentRequests`.
// In such cases, fall back to unlimited.
logger.Warnf("limiting -search.maxUniqueTimeseries to %v because -search.maxConcurrentRequests=%d.", 2e9, maxConcurrentRequests)
return 2e9
}
// Calculate the max metrics limit for a single request in the worst-case concurrent scenario.
// A single unique series occupies approximately 200 bytes in vmstorage.
mts := remainingMemory / 200 / maxConcurrentRequests
logger.Infof("limiting -search.maxUniqueTimeseries to %d according to -search.maxConcurrentRequests=%d and remaining memory=%d bytes. To increase the limit, reduce -search.maxConcurrentRequests or increase memory available to the process.", mts, maxConcurrentRequests, remainingMemory)
return mts
}
// GetMaxUniqueTimeSeries returns the max metrics limit calculated by available resources.
func GetMaxUniqueTimeSeries() int {
return maxUniqueTimeseriesValue
}
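To make the formula concrete, here is the arithmetic behind the "8 CPU & 32 GiB" case in the test below; the 0.4 factor mirrors the test's assumption about memory.Remaining() and is not a value defined in this change:

    package main

    import (
        "fmt"
        "math"
    )

    func main() {
        // ~40% of 32 GiB treated as remaining memory (64-bit platforms only).
        remaining := int(math.Round(32 * 1024 * 1024 * 1024 * 0.4)) // 13743895347 bytes
        perSeries := 200    // approximate bytes occupied by one unique series in vmstorage
        maxConcurrent := 16 // -search.maxConcurrentRequests
        fmt.Println(remaining / perSeries / maxConcurrent) // 4294967
    }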


@ -4,6 +4,7 @@ import (
"math" "math"
"net/http" "net/http"
"reflect" "reflect"
"runtime"
"testing" "testing"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/netstorage" "github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/netstorage"
@ -229,3 +230,29 @@ func TestGetLatencyOffsetMillisecondsFailure(t *testing.T) {
} }
f("http://localhost?latency_offset=foobar") f("http://localhost?latency_offset=foobar")
} }
func TestCalculateMaxMetricsLimitByResource(t *testing.T) {
f := func(maxConcurrentRequest, remainingMemory, expect int) {
t.Helper()
maxMetricsLimit := calculateMaxUniqueTimeSeriesForResource(maxConcurrentRequest, remainingMemory)
if maxMetricsLimit != expect {
t.Fatalf("unexpected max metrics limit: got %d, want %d", maxMetricsLimit, expect)
}
}
// Skip when GOARCH=386
if runtime.GOARCH != "386" {
// 8 CPU & 32 GiB
f(16, int(math.Round(32*1024*1024*1024*0.4)), 4294967)
// 4 CPU & 32 GiB
f(8, int(math.Round(32*1024*1024*1024*0.4)), 8589934)
}
// 2 CPU & 4 GiB
f(4, int(math.Round(4*1024*1024*1024*0.4)), 2147483)
// other edge cases
f(0, int(math.Round(4*1024*1024*1024*0.4)), 2e9)
f(4, 0, 0)
}


@ -44,7 +44,7 @@ var (
"See also -search.logSlowQueryDuration and -search.maxMemoryPerQuery") "See also -search.logSlowQueryDuration and -search.maxMemoryPerQuery")
noStaleMarkers = flag.Bool("search.noStaleMarkers", false, "Set this flag to true if the database doesn't contain Prometheus stale markers, "+ noStaleMarkers = flag.Bool("search.noStaleMarkers", false, "Set this flag to true if the database doesn't contain Prometheus stale markers, "+
"so there is no need in spending additional CPU time on its handling. Staleness markers may exist only in data obtained from Prometheus scrape targets") "so there is no need in spending additional CPU time on its handling. Staleness markers may exist only in data obtained from Prometheus scrape targets")
minWindowForInstantRollupOptimization = flagutil.NewDuration("search.minWindowForInstantRollupOptimization", "3h", "Enable cache-based optimization for repeated queries "+ minWindowForInstantRollupOptimization = flag.Duration("search.minWindowForInstantRollupOptimization", time.Hour*3, "Enable cache-based optimization for repeated queries "+
"to /api/v1/query (aka instant queries), which contain rollup functions with lookbehind window exceeding the given value") "to /api/v1/query (aka instant queries), which contain rollup functions with lookbehind window exceeding the given value")
) )
@ -1092,7 +1092,6 @@ func evalInstantRollup(qt *querytracer.Tracer, ec *EvalConfig, funcName string,
again: again:
offset := int64(0) offset := int64(0)
tssCached := rollupResultCacheV.GetInstantValues(qt, expr, window, ec.Step, ec.EnforcedTagFilterss) tssCached := rollupResultCacheV.GetInstantValues(qt, expr, window, ec.Step, ec.EnforcedTagFilterss)
ec.QueryStats.addSeriesFetched(len(tssCached))
if len(tssCached) == 0 { if len(tssCached) == 0 {
// Cache miss. Re-populate the missing data. // Cache miss. Re-populate the missing data.
start := int64(fasttime.UnixTimestamp()*1000) - cacheTimestampOffset.Milliseconds() start := int64(fasttime.UnixTimestamp()*1000) - cacheTimestampOffset.Milliseconds()
@ -1136,6 +1135,7 @@ func evalInstantRollup(qt *querytracer.Tracer, ec *EvalConfig, funcName string,
deleteCachedSeries(qt) deleteCachedSeries(qt)
goto again goto again
} }
ec.QueryStats.addSeriesFetched(len(tssCached))
return tssCached, offset, nil return tssCached, offset, nil
} }
@ -1647,6 +1647,10 @@ func evalRollupFuncWithMetricExpr(qt *querytracer.Tracer, ec *EvalConfig, funcNa
ecNew = copyEvalConfig(ec) ecNew = copyEvalConfig(ec)
ecNew.Start = start ecNew.Start = start
} }
// The call to evalWithConfig also updates QueryStats.addSeriesFetched
// without checking whether tss intersects with tssCached,
// so the final number could be bigger than the actual number of unique series.
// This discrepancy is acceptable, since the seriesFetched stat is informational only.
tss, err := evalWithConfig(ecNew) tss, err := evalWithConfig(ecNew)
if err != nil { if err != nil {
return nil, err return nil, err


@ -1,13 +1,13 @@
{ {
"files": { "files": {
"main.css": "./static/css/main.d871147a.css", "main.css": "./static/css/main.d781989c.css",
"main.js": "./static/js/main.621c4b4d.js", "main.js": "./static/js/main.68e2aae8.js",
"static/js/685.f772060c.chunk.js": "./static/js/685.f772060c.chunk.js", "static/js/685.f772060c.chunk.js": "./static/js/685.f772060c.chunk.js",
"static/media/MetricsQL.md": "./static/media/MetricsQL.a00044c91d9781cf8557.md", "static/media/MetricsQL.md": "./static/media/MetricsQL.a00044c91d9781cf8557.md",
"index.html": "./index.html" "index.html": "./index.html"
}, },
"entrypoints": [ "entrypoints": [
"static/css/main.d871147a.css", "static/css/main.d781989c.css",
"static/js/main.621c4b4d.js" "static/js/main.68e2aae8.js"
] ]
} }


@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore and troubleshoot your VictoriaMetrics data"/><link rel="manifest" href="./manifest.json"/><title>vmui</title><script src="./dashboards/index.js" type="module"></script><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaMetrics"><meta name="twitter:site" content="@https://victoriametrics.com/"><meta name="twitter:description" content="Explore and troubleshoot your VictoriaMetrics data"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaMetrics"><meta property="og:url" content="https://victoriametrics.com/"><meta property="og:description" content="Explore and troubleshoot your VictoriaMetrics data"><script defer="defer" src="./static/js/main.621c4b4d.js"></script><link href="./static/css/main.d871147a.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html> <!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore and troubleshoot your VictoriaMetrics data"/><link rel="manifest" href="./manifest.json"/><title>vmui</title><script src="./dashboards/index.js" type="module"></script><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaMetrics"><meta name="twitter:site" content="@https://victoriametrics.com/"><meta name="twitter:description" content="Explore and troubleshoot your VictoriaMetrics data"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaMetrics"><meta property="og:url" content="https://victoriametrics.com/"><meta property="og:description" content="Explore and troubleshoot your VictoriaMetrics data"><script defer="defer" src="./static/js/main.68e2aae8.js"></script><link href="./static/css/main.d781989c.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -28,11 +28,11 @@ import (
) )
var ( var (
retentionPeriod = flagutil.NewDuration("retentionPeriod", "1", "Data with timestamps outside the retentionPeriod is automatically deleted. The minimum retentionPeriod is 24h or 1d. See also -retentionFilter") retentionPeriod = flagutil.NewRetentionDuration("retentionPeriod", "1", "Data with timestamps outside the retentionPeriod is automatically deleted. The minimum retentionPeriod is 24h or 1d. See also -retentionFilter")
snapshotAuthKey = flagutil.NewPassword("snapshotAuthKey", "authKey, which must be passed in query string to /snapshot* pages. It overrides -httpAuth.*") snapshotAuthKey = flagutil.NewPassword("snapshotAuthKey", "authKey, which must be passed in query string to /snapshot* pages. It overrides -httpAuth.*")
forceMergeAuthKey = flagutil.NewPassword("forceMergeAuthKey", "authKey, which must be passed in query string to /internal/force_merge pages. It overrides -httpAuth.*") forceMergeAuthKey = flagutil.NewPassword("forceMergeAuthKey", "authKey, which must be passed in query string to /internal/force_merge pages. It overrides -httpAuth.*")
forceFlushAuthKey = flagutil.NewPassword("forceFlushAuthKey", "authKey, which must be passed in query string to /internal/force_flush pages. It overrides -httpAuth.*") forceFlushAuthKey = flagutil.NewPassword("forceFlushAuthKey", "authKey, which must be passed in query string to /internal/force_flush pages. It overrides -httpAuth.*")
snapshotsMaxAge = flagutil.NewDuration("snapshotsMaxAge", "0", "Automatically delete snapshots older than -snapshotsMaxAge if it is set to non-zero duration. Make sure that backup process has enough time to finish the backup before the corresponding snapshot is automatically deleted") snapshotsMaxAge = flagutil.NewRetentionDuration("snapshotsMaxAge", "0", "Automatically delete snapshots older than -snapshotsMaxAge if it is set to non-zero duration. Make sure that backup process has enough time to finish the backup before the corresponding snapshot is automatically deleted")
_ = flag.Duration("snapshotCreateTimeout", 0, "Deprecated: this flag does nothing") _ = flag.Duration("snapshotCreateTimeout", 0, "Deprecated: this flag does nothing")
precisionBits = flag.Int("precisionBits", 64, "The number of precision bits to store per each value. Lower precision bits improves data compression at the cost of precision loss") precisionBits = flag.Int("precisionBits", 64, "The number of precision bits to store per each value. Lower precision bits improves data compression at the cost of precision loss")

File diff suppressed because it is too large


@ -0,0 +1,5 @@
{
"license": {
"type": "opensource"
}
}


@ -15,6 +15,8 @@ import WithTemplate from "./pages/WithTemplate";
import Relabel from "./pages/Relabel"; import Relabel from "./pages/Relabel";
import ActiveQueries from "./pages/ActiveQueries"; import ActiveQueries from "./pages/ActiveQueries";
import QueryAnalyzer from "./pages/QueryAnalyzer"; import QueryAnalyzer from "./pages/QueryAnalyzer";
import DownsamplingFilters from "./pages/DownsamplingFilters";
import RetentionFilters from "./pages/RetentionFilters";
const App: FC = () => { const App: FC = () => {
const [loadedTheme, setLoadedTheme] = useState(false); const [loadedTheme, setLoadedTheme] = useState(false);
@ -74,6 +76,14 @@ const App: FC = () => {
path={router.icons} path={router.icons}
element={<PreviewIcons/>} element={<PreviewIcons/>}
/> />
<Route
path={router.downsamplingDebug}
element={<DownsamplingFilters/>}
/>
<Route
path={router.retentionDebug}
element={<RetentionFilters/>}
/>
</Route> </Route>
</Routes> </Routes>
)} )}


@ -0,0 +1,7 @@
export const getDownsamplingFiltersDebug = (server: string, flags: string, metrics: string): string => {
const params = [
`flags=${encodeURIComponent(flags)}`,
`metrics=${encodeURIComponent(metrics)}`
];
return `${server}/downsampling-filters-debug?${params.join("&")}`;
};


@ -0,0 +1,7 @@
export const getRetentionFiltersDebug = (server: string, flags: string, metrics: string): string => {
const params = [
`flags=${encodeURIComponent(flags)}`,
`metrics=${encodeURIComponent(metrics)}`
];
return `${server}/retention-filters-debug?${params.join("&")}`;
};


@ -33,12 +33,15 @@ const BarHitsChart: FC<Props> = ({ logHits, data: _data, period, setPeriod, onAp
graphStyle: GRAPH_STYLES.LINE_STEPPED, graphStyle: GRAPH_STYLES.LINE_STEPPED,
stacked: false, stacked: false,
fill: false, fill: false,
hideChart: false,
}); });
const { xRange, setPlotScale } = usePlotScale({ period, setPeriod }); const { xRange, setPlotScale } = usePlotScale({ period, setPeriod });
const { onReadyChart, isPanning } = useReadyChart(setPlotScale); const { onReadyChart, isPanning } = useReadyChart(setPlotScale);
useZoomChart({ uPlotInst, xRange, setPlotScale }); useZoomChart({ uPlotInst, xRange, setPlotScale });
const isEmptyData = useMemo(() => _data.every(d => d.length === 0), [_data]);
const { data, bands } = useMemo(() => { const { data, bands } = useMemo(() => {
return graphOptions.stacked ? stack(_data, () => false) : { data: _data, bands: [] }; return graphOptions.stacked ? stack(_data, () => false) : { data: _data, bands: [] };
}, [graphOptions, _data]); }, [graphOptions, _data]);
@ -88,26 +91,33 @@ const BarHitsChart: FC<Props> = ({ logHits, data: _data, period, setPeriod, onAp
}, [data]); }, [data]);
return ( return (
<div className="vm-bar-hits-chart__wrapper"> <div
<div className={classNames({
className={classNames({ "vm-bar-hits-chart__wrapper": true,
"vm-bar-hits-chart": true, "vm-bar-hits-chart__wrapper_hidden": graphOptions.hideChart
"vm-bar-hits-chart_panning": isPanning })}
})} >
ref={containerRef} {!graphOptions.hideChart && (
>
<div <div
className="vm-line-chart__u-plot" className={classNames({
ref={uPlotRef} "vm-bar-hits-chart": true,
/> "vm-bar-hits-chart_panning": isPanning
<BarHitsTooltip })}
uPlotInst={uPlotInst} ref={containerRef}
data={_data} >
focusDataIdx={focusDataIdx} <div
/> className="vm-line-chart__u-plot"
</div> ref={uPlotRef}
/>
<BarHitsTooltip
uPlotInst={uPlotInst}
data={_data}
focusDataIdx={focusDataIdx}
/>
</div>
)}
<BarHitsOptions onChange={setGraphOptions}/> <BarHitsOptions onChange={setGraphOptions}/>
{uPlotInst && ( {uPlotInst && !isEmptyData && !graphOptions.hideChart && (
<BarHitsLegend <BarHitsLegend
uPlotInst={uPlotInst} uPlotInst={uPlotInst}
onApplyFilter={onApplyFilter} onApplyFilter={onApplyFilter}


@ -6,7 +6,7 @@ import useStateSearchParams from "../../../../hooks/useStateSearchParams";
import { useSearchParams } from "react-router-dom"; import { useSearchParams } from "react-router-dom";
import Button from "../../../Main/Button/Button"; import Button from "../../../Main/Button/Button";
import classNames from "classnames"; import classNames from "classnames";
import { SettingsIcon } from "../../../Main/Icons"; import { SettingsIcon, VisibilityIcon, VisibilityOffIcon } from "../../../Main/Icons";
import Tooltip from "../../../Main/Tooltip/Tooltip"; import Tooltip from "../../../Main/Tooltip/Tooltip";
import Popper from "../../../Main/Popper/Popper"; import Popper from "../../../Main/Popper/Popper";
import useBoolean from "../../../../hooks/useBoolean"; import useBoolean from "../../../../hooks/useBoolean";
@ -27,12 +27,14 @@ const BarHitsOptions: FC<Props> = ({ onChange }) => {
const [graphStyle, setGraphStyle] = useStateSearchParams(GRAPH_STYLES.LINE_STEPPED, "graph"); const [graphStyle, setGraphStyle] = useStateSearchParams(GRAPH_STYLES.LINE_STEPPED, "graph");
const [stacked, setStacked] = useStateSearchParams(false, "stacked"); const [stacked, setStacked] = useStateSearchParams(false, "stacked");
const [fill, setFill] = useStateSearchParams(false, "fill"); const [fill, setFill] = useStateSearchParams(false, "fill");
const [hideChart, setHideChart] = useStateSearchParams(false, "hide_chart");
const options: GraphOptions = useMemo(() => ({ const options: GraphOptions = useMemo(() => ({
graphStyle, graphStyle,
stacked, stacked,
fill, fill,
}), [graphStyle, stacked, fill]); hideChart,
}), [graphStyle, stacked, fill, hideChart]);
const handleChangeGraphStyle = (val: string) => () => { const handleChangeGraphStyle = (val: string) => () => {
setGraphStyle(val as GRAPH_STYLES); setGraphStyle(val as GRAPH_STYLES);
@ -52,24 +54,41 @@ const BarHitsOptions: FC<Props> = ({ onChange }) => {
setSearchParams(searchParams); setSearchParams(searchParams);
}; };
const toggleHideChart = () => {
setHideChart(prev => {
const newVal = !prev;
newVal ? searchParams.set("hide_chart", "true") : searchParams.delete("hide_chart");
setSearchParams(searchParams);
return newVal;
});
};
useEffect(() => { useEffect(() => {
onChange(options); onChange(options);
}, [options]); }, [options]);
return ( return (
<div <div className="vm-bar-hits-options">
className="vm-bar-hits-options" <Tooltip title={hideChart ? "Show chart and resume hits updates" : "Hide chart and pause hits updates"}>
ref={optionsButtonRef}
>
<Tooltip title="Graph settings">
<Button <Button
variant="text" variant="text"
color="primary" color="primary"
startIcon={<SettingsIcon/>} startIcon={hideChart ? <VisibilityOffIcon/> : <VisibilityIcon/>}
onClick={toggleOpenOptions} onClick={toggleHideChart}
ariaLabel="settings" ariaLabel="settings"
/> />
</Tooltip> </Tooltip>
<div ref={optionsButtonRef}>
<Tooltip title="Graph settings">
<Button
variant="text"
color="primary"
startIcon={<SettingsIcon/>}
onClick={toggleOpenOptions}
ariaLabel="settings"
/>
</Tooltip>
</div>
<Popper <Popper
open={openOptions} open={openOptions}
placement="bottom-right" placement="bottom-right"


@ -1,6 +1,8 @@
@use "src/styles/variables" as *; @use "src/styles/variables" as *;
.vm-bar-hits-options { .vm-bar-hits-options {
display: flex;
align-items: center;
position: absolute; position: absolute;
top: $padding-small; top: $padding-small;
right: $padding-small; right: $padding-small;


@ -10,6 +10,10 @@
flex-direction: column; flex-direction: column;
width: 100%; width: 100%;
height: 100%; height: 100%;
&_hidden {
min-height: 90px;
}
} }
&_panning { &_panning {


@ -9,4 +9,5 @@ export interface GraphOptions {
graphStyle: GRAPH_STYLES; graphStyle: GRAPH_STYLES;
stacked: boolean; stacked: boolean;
fill: boolean; fill: boolean;
hideChart: boolean;
} }


@ -51,6 +51,8 @@
&__content { &__content {
filter: brightness(0.6); filter: brightness(0.6);
white-space: pre-line; white-space: pre-line;
text-wrap: balance;
overflow-wrap: anywhere;
} }
&_success { &_success {


@ -553,3 +553,20 @@ export const SearchIcon = () => (
></path> ></path>
</svg> </svg>
); );
export const SpinnerIcon = () => (
<svg viewBox="0 0 24 24">
<path
fill="currentColor"
d="M12,4a8,8,0,0,1,7.89,6.7A1.53,1.53,0,0,0,21.38,12h0a1.5,1.5,0,0,0,1.48-1.75,11,11,0,0,0-21.72,0A1.5,1.5,0,0,0,2.62,12h0a1.53,1.53,0,0,0,1.49-1.3A8,8,0,0,1,12,4Z"
>
<animateTransform
attributeName="transform"
dur="0.75s"
repeatCount="indefinite"
type="rotate"
values="0 12 12;360 12 12"
/>
</path>
</svg>
);


@ -0,0 +1,13 @@
import React, { FC } from "preact/compat";
import "./style.scss";
const LineLoader: FC = () => {
return (
<div className="vm-line-loader">
<div className="vm-line-loader__background"></div>
<div className="vm-line-loader__line"></div>
</div>
);
};
export default LineLoader;


@ -0,0 +1,39 @@
@use "src/styles/variables" as *;
.vm-line-loader {
position: absolute;
top: 0;
left: 0;
right: 0;
height: 2px;
z-index: 2;
overflow: hidden;
&__background {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
background-color: $color-text;
opacity: 0.1;
}
&__line {
position: absolute;
width: 10%;
height: 100%;
background-color: $color-primary;
animation: slide 2s infinite linear;
opacity: 0.8;
}
}
@keyframes slide {
0% {
left: 0;
}
100% {
left: 100%;
}
}


@ -1,81 +0,0 @@
import router, { routerOptions } from "../router";
export enum NavigationItemType {
internalLink,
externalLink,
}
export interface NavigationItem {
label?: string,
value?: string,
hide?: boolean
submenu?: NavigationItem[],
type?: NavigationItemType,
}
const explore = {
label: "Explore",
submenu: [
{
label: routerOptions[router.metrics].title,
value: router.metrics,
},
{
label: routerOptions[router.cardinality].title,
value: router.cardinality,
},
{
label: routerOptions[router.topQueries].title,
value: router.topQueries,
},
{
label: routerOptions[router.activeQueries].title,
value: router.activeQueries,
},
]
};
const tools = {
label: "Tools",
submenu: [
{
label: routerOptions[router.trace].title,
value: router.trace,
},
{
label: routerOptions[router.queryAnalyzer].title,
value: router.queryAnalyzer,
},
{
label: routerOptions[router.withTemplate].title,
value: router.withTemplate,
},
{
label: routerOptions[router.relabel].title,
value: router.relabel,
},
]
};
export const logsNavigation: NavigationItem[] = [
{
label: routerOptions[router.logs].title,
value: router.home,
},
];
export const anomalyNavigation: NavigationItem[] = [
{
label: routerOptions[router.anomaly].title,
value: router.home,
}
];
export const defaultNavigation: NavigationItem[] = [
{
label: routerOptions[router.home].title,
value: router.home,
},
explore,
tools,
];


@ -0,0 +1,34 @@
import { useAppDispatch } from "../state/common/StateContext";
import { useEffect, useState } from "preact/compat";
import { ErrorTypes } from "../types";
const useFetchFlags = () => {
const dispatch = useAppDispatch();
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState<ErrorTypes | string>("");
useEffect(() => {
const fetchAppConfig = async () => {
if (process.env.REACT_APP_TYPE) return;
setError("");
setIsLoading(true);
try {
const data = await fetch("./config.json");
const config = await data.json();
dispatch({ type: "SET_APP_CONFIG", payload: config || {} });
} catch (e) {
if (e instanceof Error) setError(`${e.name}: ${e.message}`);
} finally {
setIsLoading(false);
}
};
fetchAppConfig();
}, []);
return { isLoading, error };
};
export default useFetchFlags;
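The ./config.json fetched here plausibly corresponds to the static config.json added earlier in this diff (the file containing {"license": {"type": "opensource"}}), whose parsed contents are dispatched as SET_APP_CONFIG; that association is an inference from the file paths.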


@ -34,7 +34,8 @@ interface FetchQueryReturn {
queryStats: QueryStats[], queryStats: QueryStats[],
warning?: string, warning?: string,
traces?: Trace[], traces?: Trace[],
isHistogram: boolean isHistogram: boolean,
abortFetch: () => void
} }
interface FetchDataParams { interface FetchDataParams {
@ -160,6 +161,7 @@ export const useFetchQuery = ({
const error = e as Error; const error = e as Error;
if (error.name === "AbortError") { if (error.name === "AbortError") {
// Aborts are expected, don't show an error for them. // Aborts are expected, don't show an error for them.
setIsLoading(false);
return; return;
} }
const helperText = "Please check your serverURL settings and confirm server availability."; const helperText = "Please check your serverURL settings and confirm server availability.";
@ -197,6 +199,13 @@ export const useFetchQuery = ({
}, },
[serverUrl, period, displayType, customStep, hideQuery]); [serverUrl, period, displayType, customStep, hideQuery]);
const abortFetch = useCallback(() => {
fetchQueue.forEach(f => f.abort());
setFetchQueue([]);
setGraphData([]);
setLiveData([]);
}, [fetchQueue]);
const [prevUrl, setPrevUrl] = useState<string[]>([]); const [prevUrl, setPrevUrl] = useState<string[]>([]);
useEffect(() => { useEffect(() => {
@ -238,6 +247,7 @@ export const useFetchQuery = ({
queryStats, queryStats,
warning, warning,
traces, traces,
isHistogram isHistogram,
abortFetch,
}; };
}; };


@@ -1,16 +1,12 @@
-import React, { FC, useMemo, useState } from "preact/compat";
-import router, { routerOptions } from "../../../router";
-import { getAppModeEnable } from "../../../utils/app-mode";
+import React, { FC, useState } from "preact/compat";
 import { useLocation } from "react-router-dom";
-import { useDashboardsState } from "../../../state/dashboards/DashboardsStateContext";
 import { useEffect } from "react";
 import "./style.scss";
 import NavItem from "./NavItem";
 import NavSubItem from "./NavSubItem";
 import classNames from "classnames";
-import { anomalyNavigation, defaultNavigation, logsNavigation, NavigationItemType } from "../../../constants/navigation";
-import { AppType } from "../../../types/appType";
-import { useAppState } from "../../../state/common/StateContext";
+import useNavigationMenu from "../../../router/useNavigationMenu";
+import { NavigationItemType } from "../../../router/navigation";

 interface HeaderNavProps {
   color: string
@@ -19,43 +15,14 @@ interface HeaderNavProps {
 }

 const HeaderNav: FC<HeaderNavProps> = ({ color, background, direction }) => {
-  const appModeEnable = getAppModeEnable();
-  const { dashboardsSettings } = useDashboardsState();
   const { pathname } = useLocation();
-  const { serverUrl, flags } = useAppState();
   const [activeMenu, setActiveMenu] = useState(pathname);
-
-  const menu = useMemo(() => {
-    switch (process.env.REACT_APP_TYPE) {
-      case AppType.logs:
-        return logsNavigation;
-      case AppType.anomaly:
-        return anomalyNavigation;
-      default:
-        return ([
-          ...defaultNavigation,
-          {
-            label: routerOptions[router.dashboards].title,
-            value: router.dashboards,
-            hide: appModeEnable || !dashboardsSettings.length,
-          },
-          {
-            // see more https://docs.victoriametrics.com/cluster-victoriametrics/?highlight=vmalertproxyurl#vmalert
-            label: "Alerts",
-            value: `${serverUrl}/vmalert`,
-            type: NavigationItemType.externalLink,
-            hide: !Object.keys(flags).includes("vmalert.proxyURL"),
-          },
-        ].filter(r => !r.hide));
-    }
-  }, [appModeEnable, dashboardsSettings]);
+  const menu = useNavigationMenu();

   useEffect(() => {
     setActiveMenu(pathname);
   }, [pathname]);

   return (
     <nav
       className={classNames({

View file

@@ -1,7 +1,7 @@
 import React, { FC } from "preact/compat";
 import { NavLink } from "react-router-dom";
 import classNames from "classnames";
-import { NavigationItemType } from "../../../constants/navigation";
+import { NavigationItemType } from "../../../router/navigation";

 interface NavItemProps {
   activeMenu: string,

View file

@@ -6,7 +6,7 @@ import Popper from "../../../components/Main/Popper/Popper";
 import NavItem from "./NavItem";
 import { useEffect } from "react";
 import useBoolean from "../../../hooks/useBoolean";
-import { NavigationItem, NavigationItemType } from "../../../constants/navigation";
+import { NavigationItem, NavigationItemType } from "../../../router/navigation";

 interface NavItemProps {
   activeMenu: string,

View file

@@ -12,6 +12,7 @@ import useDeviceDetect from "../../hooks/useDeviceDetect";
 import ControlsMainLayout from "./ControlsMainLayout";
 import useFetchDefaultTimezone from "../../hooks/useFetchDefaultTimezone";
 import useFetchFlags from "../../hooks/useFetchFlags";
+import useFetchAppConfig from "../../hooks/useFetchAppConfig";

 const MainLayout: FC = () => {
   const appModeEnable = getAppModeEnable();
@@ -21,6 +22,7 @@ const MainLayout: FC = () => {
   useFetchDashboards();
   useFetchDefaultTimezone();
+  useFetchAppConfig();
   useFetchFlags();

   const setDocumentTitle = () => {

View file

@@ -41,10 +41,10 @@ const CardinalityTotals: FC<CardinalityTotalsProps> = ({
       value: totalSeries.toLocaleString("en-US"),
       dynamic: (!totalSeries || !totalSeriesPrev || isPrometheus) ? "" : `${dynamic.toFixed(2)}%`,
       display: !focusLabel,
-      info: `The total number of active time series.
+      info: `The total number of unique time series for a selected day.
       A time series is uniquely identified by its name plus a set of its labels.
       For example, temperature{city="NY",country="US"} and temperature{city="SF",country="US"}
-      are two distinct series, since they differ by the city label.`
+      are two distinct series, since they differ by the "city" label.`
     },
     {
       title: "Percentage from total",

View file

@@ -10,6 +10,7 @@ import {
   PlayIcon,
   PlusIcon,
   Prettify,
+  SpinnerIcon,
   VisibilityIcon,
   VisibilityOffIcon
 } from "../../../components/Main/Icons";
@@ -30,8 +31,10 @@ export interface QueryConfiguratorProps {
   setQueryErrors: Dispatch<SetStateAction<string[]>>;
   setHideError: Dispatch<SetStateAction<boolean>>;
   stats: QueryStats[];
+  isLoading?: boolean;
   onHideQuery?: (queries: number[]) => void
   onRunQuery: () => void;
+  abortFetch?: () => void;
   hideButtons?: {
     addQuery?: boolean;
     prettify?: boolean;
@@ -46,8 +49,10 @@ const QueryConfigurator: FC<QueryConfiguratorProps> = ({
   setQueryErrors,
   setHideError,
   stats,
+  isLoading,
   onHideQuery,
   onRunQuery,
+  abortFetch,
   hideButtons
 }) => {
@@ -84,6 +89,10 @@ const QueryConfigurator: FC<QueryConfiguratorProps> = ({
   };

   const handleRunQuery = () => {
+    if (isLoading) {
+      abortFetch && abortFetch();
+      return;
+    }
     updateHistory();
     queryDispatch({ type: "SET_QUERY", payload: stateQuery });
     timeDispatch({ type: "RUN_QUERY" });
@@ -271,9 +280,9 @@ const QueryConfigurator: FC<QueryConfiguratorProps> = ({
         <Button
           variant="contained"
           onClick={handleRunQuery}
-          startIcon={<PlayIcon/>}
+          startIcon={isLoading ? <SpinnerIcon/> : <PlayIcon/>}
         >
-          {isMobile ? "Execute" : "Execute Query"}
+          {`${isLoading ? "Cancel" : "Execute"} ${isMobile ? "" : "Query"}`}
         </Button>
       </div>
     </div>

View file

@@ -3,7 +3,7 @@ import QueryConfigurator from "./QueryConfigurator/QueryConfigurator";
 import { useFetchQuery } from "../../hooks/useFetchQuery";
 import { DisplayTypeSwitch } from "./DisplayTypeSwitch";
 import { useGraphDispatch, useGraphState } from "../../state/graph/GraphStateContext";
-import Spinner from "../../components/Main/Spinner/Spinner";
+import LineLoader from "../../components/Main/LineLoader/LineLoader";
 import { useCustomPanelState } from "../../state/customPanel/CustomPanelStateContext";
 import { useQueryState } from "../../state/query/QueryStateContext";
 import { useSetQueryParams } from "./hooks/useSetQueryParams";
@@ -45,7 +45,8 @@ const CustomPanel: FC = () => {
     queryStats,
     warning,
     traces,
-    isHistogram
+    isHistogram,
+    abortFetch,
   } = useFetchQuery({
     visible: true,
     customStep,
@@ -80,14 +81,15 @@ const CustomPanel: FC = () => {
         setQueryErrors={setQueryErrors}
         setHideError={setHideError}
         stats={queryStats}
+        isLoading={isLoading}
         onHideQuery={handleHideQuery}
         onRunQuery={handleRunQuery}
+        abortFetch={abortFetch}
       />
       <CustomPanelTraces
         traces={traces}
         displayType={displayType}
       />
-      {isLoading && <Spinner />}
       {showError && <Alert variant="error">{error}</Alert>}
       {showInstantQueryTip && <Alert variant="info"><InstantQueryTip/></Alert>}
       {warning && (
@@ -105,6 +107,7 @@ const CustomPanel: FC = () => {
         "vm-block_mobile": isMobile,
       })}
     >
+      {isLoading && <LineLoader />}
       <div
         className="vm-custom-panel-body-header"
         ref={controlsRef}

View file

@ -0,0 +1,53 @@
import { useAppState } from "../../../state/common/StateContext";
import { useState } from "react";
import { ErrorTypes } from "../../../types";
import { useSearchParams } from "react-router-dom";
import { getDownsamplingFiltersDebug } from "../../../api/downsampling-filters-debug";
import { useCallback } from "preact/compat";
export const useDebugDownsamplingFilters = () => {
const { serverUrl } = useAppState();
const [searchParams, setSearchParams] = useSearchParams();
const [data, setData] = useState<Map<string, string[]>>(new Map());
const [loading, setLoading] = useState(false);
const [metricsError, setMetricsError] = useState<ErrorTypes | string>();
const [flagsError, setFlagsError] = useState<ErrorTypes | string>();
const [error, setError] = useState<ErrorTypes | string>();
const fetchData = useCallback(async (flags: string, metrics: string) => {
setMetricsError(metrics ? "" : "metrics are required");
setFlagsError(flags ? "" : "flags are required");
if (!metrics || !flags) return;
searchParams.set("flags", flags);
searchParams.set("metrics", metrics);
setSearchParams(searchParams);
const fetchUrl = getDownsamplingFiltersDebug(serverUrl, flags, metrics);
setLoading(true);
try {
const response = await fetch(fetchUrl);
const resp = await response.json();
setData(new Map(Object.entries(resp.result || {})));
setMetricsError(resp.error?.metrics || "");
setFlagsError(resp.error?.flags || "");
setError("");
} catch (e) {
if (e instanceof Error && e.name !== "AbortError") {
setError(`${e.name}: ${e.message}`);
}
}
setLoading(false);
}, [serverUrl]);
return {
data,
error,
metricsError,
flagsError,
loading,
applyFilters: fetchData
};
};

View file

@ -0,0 +1,137 @@
import React, { FC, useEffect } from "preact/compat";
import "./style.scss";
import TextField from "../../components/Main/TextField/TextField";
import { useCallback, useState } from "react";
import Button from "../../components/Main/Button/Button";
import { PlayIcon, WikiIcon } from "../../components/Main/Icons";
import { useDebugDownsamplingFilters } from "./hooks/useDebugDownsamplingFilters";
import Spinner from "../../components/Main/Spinner/Spinner";
import { useSearchParams } from "react-router-dom";
const example = {
flags: `-downsampling.period={env="dev"}:7d:5m,{env="dev"}:30d:30m
-downsampling.period=30d:1m
-downsampling.period=60d:5m
`,
metrics: `up
up{env="dev"}
up{env="prod"}`,
};
const DownsamplingFilters: FC = () => {
const [searchParams] = useSearchParams();
const { data, loading, error, metricsError, flagsError, applyFilters } = useDebugDownsamplingFilters();
const [metrics, setMetrics] = useState(searchParams.get("metrics") || "");
const [flags, setFlags] = useState(searchParams.get("flags") || "");
const handleMetricsChangeInput = useCallback((val: string) => {
setMetrics(val);
}, [setMetrics]);
const handleFlagsChangeInput = useCallback((val: string) => {
setFlags(val);
}, [setFlags]);
const handleApplyFilters = useCallback(() => {
applyFilters(flags, metrics);
}, [applyFilters, flags, metrics]);
const handleRunExample = useCallback(() => {
const { flags, metrics } = example;
setFlags(flags);
setMetrics(metrics);
// applyFilters already persists flags/metrics to the URL search params,
// so there is no need to mutate searchParams here.
applyFilters(flags, metrics);
}, [applyFilters]);
useEffect(() => {
if (flags && metrics) handleApplyFilters();
}, []);
const rows = [];
for (const [key, value] of data) {
rows.push(<tr className="vm-table__row" key={key}>
<td className="vm-table-cell">{key}</td>
<td className="vm-table-cell">{value.join(" ")}</td>
</tr>);
}
return (
<section className="vm-downsampling-filters">
{loading && <Spinner/>}
<div className="vm-downsampling-filters-body vm-block">
<div className="vm-downsampling-filters-body__expr">
<div className="vm-retention-filters-body__title">
<p>Provide a list of flags for downsampling configuration. Note that
only <code>-downsampling.period</code> and <code>-dedup.minScrapeInterval</code> flags are supported.</p>
</div>
<TextField
type="textarea"
label="Flags"
value={flags}
error={error || flagsError}
autofocus
onEnter={handleApplyFilters}
onChange={handleFlagsChangeInput}
placeholder={"-downsampling.period=30d:1m -downsampling.period=7d:5m -dedup.minScrapeInterval=30s"}
/>
</div>
<div className="vm-downsampling-filters-body__expr">
<div className="vm-retention-filters-body__title">
<p>Provide a list of metrics to check downsampling configuration.</p>
</div>
<TextField
type="textarea"
label="Metrics"
value={metrics}
error={error || metricsError}
onEnter={handleApplyFilters}
onChange={handleMetricsChangeInput}
placeholder={"up{env=\"dev\"}\nup{env=\"prod\"}\n"}
/>
</div>
<div className="vm-downsampling-filters-body__result">
<table className="vm-table">
<thead className="vm-table-header">
<tr>
<th className="vm-table-cell vm-table-cell_header">Metric</th>
<th className="vm-table-cell vm-table-cell_header">Applied downsampling rules</th>
</tr>
</thead>
<tbody className="vm-table-body">
{rows}
</tbody>
</table>
</div>
<div className="vm-downsampling-filters-body-top">
<a
className="vm-link vm-link_with-icon"
target="_blank"
href="https://docs.victoriametrics.com/#downsampling"
rel="help noreferrer"
>
<WikiIcon/>
Documentation
</a>
<Button
variant="text"
onClick={handleRunExample}
>
Try example
</Button>
<Button
variant="contained"
onClick={handleApplyFilters}
startIcon={<PlayIcon/>}
>
Apply
</Button>
</div>
</div>
</section>
);
};
export default DownsamplingFilters;

View file

@ -0,0 +1,46 @@
@use "src/styles/variables" as *;
.vm-downsampling-filters {
display: grid;
gap: $padding-medium;
&-body {
display: grid;
gap: $padding-global;
align-items: flex-start;
width: 100%;
&__title {
margin-bottom: $padding-medium;
}
&-top {
display: flex;
gap: $padding-small;
align-items: center;
justify-content: flex-end;
}
&__expr textarea {
min-height: 200px;
}
&__result textarea {
min-height: 60px;
}
code {
background-color: var(--color-hover-black);
border-radius: 6px;
font-size: 85%;
padding: .2em .4em;
}
textarea {
font-family: $font-family-monospace;
overflow: auto;
width: 100%;
height: 100%;
}
}
}

View file

@@ -1,10 +1,9 @@
-import React, { FC, useCallback, useEffect, useState } from "preact/compat";
+import React, { FC, useCallback, useEffect, useMemo, useState } from "preact/compat";
 import ExploreLogsBody from "./ExploreLogsBody/ExploreLogsBody";
 import useStateSearchParams from "../../hooks/useStateSearchParams";
 import useSearchParamsFromObject from "../../hooks/useSearchParamsFromObject";
 import { useFetchLogs } from "./hooks/useFetchLogs";
 import { useAppState } from "../../state/common/StateContext";
-import Spinner from "../../components/Main/Spinner/Spinner";
 import Alert from "../../components/Main/Alert/Alert";
 import ExploreLogsHeader from "./ExploreLogsHeader/ExploreLogsHeader";
 import "./style.scss";
@@ -15,6 +14,7 @@ import ExploreLogsBarChart from "./ExploreLogsBarChart/ExploreLogsBarChart";
 import { useFetchLogHits } from "./hooks/useFetchLogHits";
 import { LOGS_ENTRIES_LIMIT } from "../../constants/logs";
 import { getTimeperiodForDuration, relativeTimeOptions } from "../../utils/time";
+import { useSearchParams } from "react-router-dom";

 const storageLimit = Number(getFromStorage("LOGS_LIMIT"));
 const defaultLimit = isNaN(storageLimit) ? LOGS_ENTRIES_LIMIT : storageLimit;
@@ -23,6 +23,8 @@ const ExploreLogs: FC = () => {
   const { serverUrl } = useAppState();
   const { duration, relativeTime, period: periodState } = useTimeState();
   const { setSearchParamsFromKeys } = useSearchParamsFromObject();
+  const [searchParams] = useSearchParams();
+  const hideChart = useMemo(() => searchParams.get("hide_chart"), [searchParams]);

   const [limit, setLimit] = useStateSearchParams(defaultLimit, "limit");
   const [query, setQuery] = useStateSearchParams("*", "query");
@@ -30,7 +32,7 @@ const ExploreLogs: FC = () => {
   const [period, setPeriod] = useState<TimeParams>(periodState);
   const [queryError, setQueryError] = useState<ErrorTypes | string>("");

-  const { logs, isLoading, error, fetchLogs } = useFetchLogs(serverUrl, query, limit);
+  const { logs, isLoading, error, fetchLogs, abortController } = useFetchLogs(serverUrl, query, limit);
   const { fetchLogHits, ...dataLogHits } = useFetchLogHits(serverUrl, query);

   const getPeriod = useCallback(() => {
@@ -50,7 +52,7 @@ const ExploreLogs: FC = () => {
     const newPeriod = getPeriod();
     setPeriod(newPeriod);
     fetchLogs(newPeriod).then((isSuccess) => {
-      isSuccess && fetchLogHits(newPeriod);
+      isSuccess && !hideChart && fetchLogHits(newPeriod);
     }).catch(e => e);
     setSearchParamsFromKeys( {
       query,
@@ -70,10 +72,15 @@ const ExploreLogs: FC = () => {
     setQuery(prev => `_stream: ${val === "other" ? "{}" : val} AND (${prev})`);
   };

-  const handleUpdateQuery = () => {
-    setQuery(tmpQuery);
-    handleRunQuery();
-  };
+  const handleUpdateQuery = useCallback(() => {
+    if (isLoading || dataLogHits.isLoading) {
+      abortController.abort && abortController.abort();
+      dataLogHits.abortController.abort && dataLogHits.abortController.abort();
+    } else {
+      setQuery(tmpQuery);
+      handleRunQuery();
+    }
+  }, [isLoading, dataLogHits.isLoading]);

   useEffect(() => {
     if (query) handleRunQuery();
@@ -84,6 +91,10 @@ const ExploreLogs: FC = () => {
     setTmpQuery(query);
   }, [query]);

+  useEffect(() => {
+    !hideChart && fetchLogHits(period);
+  }, [hideChart]);
+
   return (
     <div className="vm-explore-logs">
       <ExploreLogsHeader
@@ -93,8 +104,8 @@ const ExploreLogs: FC = () => {
         onChange={setTmpQuery}
         onChangeLimit={handleChangeLimit}
         onRun={handleUpdateQuery}
+        isLoading={isLoading || dataLogHits.isLoading}
       />
-      {isLoading && <Spinner message={"Loading logs..."}/>}
       {error && <Alert variant="error">{error}</Alert>}
       {!error && (
         <ExploreLogsBarChart
@@ -102,10 +113,12 @@ const ExploreLogs: FC = () => {
           query={query}
           period={period}
           onApplyFilter={handleApplyFilter}
+          isLoading={isLoading ? false : dataLogHits.isLoading}
         />
       )}
-      <ExploreLogsBody data={logs}/>
+      <ExploreLogsBody
+        data={logs}
+        isLoading={isLoading}
+      />
     </div>
   );
 };

View file

@@ -1,4 +1,4 @@
-import React, { FC, useMemo } from "preact/compat";
+import React, { FC, useCallback, useMemo } from "preact/compat";
 import "./style.scss";
 import useDeviceDetect from "../../../hooks/useDeviceDetect";
 import classNames from "classnames";
@@ -9,7 +9,9 @@ import { AlignedData } from "uplot";
 import BarHitsChart from "../../../components/Chart/BarHitsChart/BarHitsChart";
 import Alert from "../../../components/Main/Alert/Alert";
 import { TimeParams } from "../../../types";
-import Spinner from "../../../components/Main/Spinner/Spinner";
+import LineLoader from "../../../components/Main/LineLoader/LineLoader";
+import { useSearchParams } from "react-router-dom";
+import { getHitsTimeParams } from "../../../utils/logs";

 interface Props {
   query: string;
@@ -23,27 +25,46 @@
 const ExploreLogsBarChart: FC<Props> = ({ logHits, period, error, isLoading, onApplyFilter }) => {
   const { isMobile } = useDeviceDetect();
   const timeDispatch = useTimeDispatch();
+  const [searchParams] = useSearchParams();
+  const hideChart = useMemo(() => searchParams.get("hide_chart"), [searchParams]);

-  const getXAxis = (timestamps: string[]): number[] => {
-    return (timestamps.map(t => t ? dayjs(t).unix() : null)
-      .filter(Boolean) as number[])
-      .sort((a, b) => a - b);
-  };
-
-  const getYAxes = (logHits: LogHits[], timestamps: string[]) => {
+  const getYAxes = (logHits: LogHits[], timestamps: number[]) => {
     return logHits.map(hits => {
-      return timestamps.map(t => {
-        const index = hits.timestamps.findIndex(ts => ts === t);
-        return index === -1 ? null : hits.values[index] || null;
+      const timestampValueMap = new Map();
+      hits.timestamps.forEach((ts, idx) => {
+        const unixTime = dayjs(ts).unix();
+        timestampValueMap.set(unixTime, hits.values[idx] || null);
       });
+      return timestamps.map(t => timestampValueMap.get(t) || null);
     });
   };

+  const generateTimestamps = useCallback((date: dayjs.Dayjs) => {
+    const result: number[] = [];
+    const { start, end, step } = getHitsTimeParams(period);
+
+    const stepsToFirstTimestamp = Math.ceil(start.diff(date, "milliseconds") / step);
+    let firstTimestamp = date.add(stepsToFirstTimestamp * step, "milliseconds");
+
+    // If the first timestamp is before 'start', set it to 'start'
+    if (firstTimestamp.isBefore(start)) {
+      firstTimestamp = start.clone();
+    }
+
+    // Calculate the total number of steps from 'firstTimestamp' to 'end'
+    const totalSteps = Math.floor(end.diff(firstTimestamp, "milliseconds") / step);
+
+    for (let i = 0; i <= totalSteps; i++) {
+      result.push(firstTimestamp.add(i * step, "milliseconds").unix());
+    }
+
+    return result;
+  }, [period]);
+
   const data = useMemo(() => {
     if (!logHits.length) return [[], []] as AlignedData;
-    const timestamps = Array.from(new Set(logHits.map(l => l.timestamps).flat()));
-    const xAxis = getXAxis(timestamps);
-    const yAxes = getYAxes(logHits, timestamps);
+    const xAxis = generateTimestamps(dayjs(logHits[0].timestamps[0]));
+    const yAxes = getYAxes(logHits, xAxis);

     return [xAxis, ...yAxes] as AlignedData;
   }, [logHits]);
@@ -51,14 +72,16 @@ const ExploreLogsBarChart: FC<Props> = ({ logHits, period, error, isLoading, onApplyFilter }) => {
     const noData = data.every(d => d.length === 0);
     const noTimestamps = data[0].length === 0;
     const noValues = data[1].length === 0;
-    if (noData) {
+    if (hideChart) {
+      return "Chart hidden. Hits updates paused.";
+    } else if (noData) {
      return "No logs volume available\nNo volume information available for the current queries and time range.";
     } else if (noTimestamps) {
      return "No timestamp information available for the current queries and time range.";
     } else if (noValues) {
      return "No value information available for the current queries and time range.";
     } return "";
-  }, [data]);
+  }, [data, hideChart]);

   const setPeriod = ({ from, to }: {from: Date, to: Date}) => {
     timeDispatch({ type: "SET_PERIOD", payload: { from, to } });
@@ -72,10 +95,7 @@ const ExploreLogsBarChart: FC<Props> = ({ logHits, period, error, isLoading, onApplyFilter }) => {
         "vm-block_mobile": isMobile,
       })}
     >
-      {isLoading && <Spinner
-        message={"Loading hits stats..."}
-        containerStyles={{ position: "absolute" }}
-      />}
+      {isLoading && <LineLoader/>}
       {!error && noDataMessage && (
         <div className="vm-explore-logs-chart__empty">
           <Alert variant="info">{noDataMessage}</Alert>
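The rework above replaces per-point findIndex lookups with a Map keyed by unix time, and snaps every series onto one shared x-axis grid aligned to the hits step. A worked example of the alignment arithmetic, using made-up times and a made-up 10-second step (the real step comes from getHitsTimeParams):

import dayjs from "dayjs";

const start = dayjs("2024-10-21T10:00:03Z");         // period start
const firstSeriesTs = dayjs("2024-10-21T09:59:58Z"); // first timestamp in the data
const stepMs = 10_000;

// round the series' first timestamp up to the nearest step boundary
const steps = Math.ceil(start.diff(firstSeriesTs, "milliseconds") / stepMs); // 1
const firstBucket = firstSeriesTs.add(steps * stepMs, "milliseconds");       // 10:00:08Z

console.log(firstBucket.isBefore(start)); // false => kept as-is; otherwise clamped to start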

View file

@@ -13,8 +13,12 @@
   }

   &__empty {
+    display: flex;
+    align-items: center;
+    justify-content: center;
     position: absolute;
-    transform: translateY(-25px);
+    top: 0;
+    bottom: 0;
     z-index: 2;
   }
 }

View file

@@ -16,9 +16,11 @@ import TableLogs from "./TableLogs";
 import GroupLogs from "../GroupLogs/GroupLogs";
 import { DATE_TIME_FORMAT } from "../../../constants/date";
 import { marked } from "marked";
+import LineLoader from "../../../components/Main/LineLoader/LineLoader";

 export interface ExploreLogBodyProps {
   data: Logs[];
+  isLoading: boolean;
 }

 enum DisplayType {
@@ -33,7 +35,7 @@ const tabs = [
   { label: "JSON", value: DisplayType.json, icon: <CodeIcon/> },
 ];

-const ExploreLogsBody: FC<ExploreLogBodyProps> = ({ data }) => {
+const ExploreLogsBody: FC<ExploreLogBodyProps> = ({ data, isLoading }) => {
   const { isMobile } = useDeviceDetect();
   const { timezone } = useTimeState();
   const { setSearchParamsFromKeys } = useSearchParamsFromObject();
@@ -75,6 +77,7 @@ const ExploreLogsBody: FC<ExploreLogBodyProps> = ({ data, isLoading }) => {
       "vm-block_mobile": isMobile,
     })}
   >
+    {isLoading && <LineLoader/>}
     <div
       className={classNames({
         "vm-explore-logs-body-header": true,

View file

@@ -1,6 +1,8 @@
 @use "src/styles/variables" as *;

 .vm-explore-logs-body {
+  position: relative;
+
   &-header {
     margin: -$padding-medium 0-$padding-medium 0;

View file

@@ -1,5 +1,5 @@
 import React, { FC, useEffect, useState } from "preact/compat";
-import { InfoIcon, PlayIcon, WikiIcon } from "../../../components/Main/Icons";
+import { InfoIcon, PlayIcon, SpinnerIcon, WikiIcon } from "../../../components/Main/Icons";
 import "./style.scss";
 import classNames from "classnames";
 import useDeviceDetect from "../../../hooks/useDeviceDetect";
@@ -11,6 +11,7 @@ export interface ExploreLogHeaderProps {
   query: string;
   limit: number;
   error?: string;
+  isLoading: boolean;
   onChange: (val: string) => void;
   onChangeLimit: (val: number) => void;
   onRun: () => void;
@@ -20,6 +21,7 @@ const ExploreLogsHeader: FC<ExploreLogHeaderProps> = ({
   query,
   limit,
   error,
+  isLoading,
   onChange,
   onChangeLimit,
   onRun,
@@ -94,13 +96,16 @@ const ExploreLogsHeader: FC<ExploreLogHeaderProps> = ({
           Documentation
         </a>
       </div>
-      <div className="vm-explore-logs-header-bottom__execute">
+      <div className="vm-explore-logs-header-bottom-execute">
         <Button
-          startIcon={<PlayIcon/>}
+          startIcon={isLoading ? <SpinnerIcon/> : <PlayIcon/>}
           onClick={onRun}
           fullWidth
         >
-          Execute Query
+          <span className="vm-explore-logs-header-bottom-execute__text">
+            {isLoading ? "Cancel Query" : "Execute Query"}
+          </span>
+          <span className="vm-explore-logs-header-bottom-execute__text_hidden">Execute Query</span>
         </Button>
       </div>
     </div>

View file

@@ -29,8 +29,18 @@
     flex-grow: 1;
   }

-  &__execute {
+  &-execute {
+    position: relative;
     display: grid;
+
+    &__text {
+      position: absolute;
+
+      &_hidden {
+        position: relative;
+        visibility: hidden;
+      }
+    }
   }

   &-helpful {

View file

@@ -7,7 +7,6 @@
     display: flex;
     align-items: center;
     justify-content: flex-end;
-    gap: $padding-global;

     &-keys {
       max-height: 300px;

View file

@@ -2,9 +2,8 @@ import { useCallback, useMemo, useRef, useState } from "preact/compat";
 import { getLogHitsUrl } from "../../../api/logs";
 import { ErrorTypes, TimeParams } from "../../../types";
 import { LogHits } from "../../../api/types";
-import dayjs from "dayjs";
-import { LOGS_BARS_VIEW } from "../../../constants/logs";
 import { useSearchParams } from "react-router-dom";
+import { getHitsTimeParams } from "../../../utils/logs";

 export const useFetchLogHits = (server: string, query: string) => {
   const [searchParams] = useSearchParams();
@@ -17,10 +16,7 @@ export const useFetchLogHits = (server: string, query: string) => {
   const url = useMemo(() => getLogHitsUrl(server), [server]);

   const getOptions = (query: string, period: TimeParams, signal: AbortSignal) => {
-    const start = dayjs(period.start * 1000);
-    const end = dayjs(period.end * 1000);
-    const totalSeconds = end.diff(start, "milliseconds");
-    const step = Math.ceil(totalSeconds / LOGS_BARS_VIEW) || 1;
+    const { start, end, step } = getHitsTimeParams(period);

     return {
       signal,
@@ -118,5 +114,6 @@ export const useFetchLogHits = (server: string, query: string) => {
     isLoading: Object.values(isLoading).some(s => s),
     error,
     fetchLogHits,
+    abortController: abortControllerRef.current
   };
 };

View file

@@ -81,5 +81,6 @@ export const useFetchLogs = (server: string, query: string, limit: number) => {
     isLoading: Object.values(isLoading).some(s => s),
     error,
     fetchLogs,
+    abortController: abortControllerRef.current
   };
 };

View file

@ -0,0 +1,53 @@
import { useAppState } from "../../../state/common/StateContext";
import { useState } from "react";
import { ErrorTypes } from "../../../types";
import { useSearchParams } from "react-router-dom";
import { getRetentionFiltersDebug } from "../../../api/retention-filters-debug";
import { useCallback } from "preact/compat";
export const useDebugRetentionFilters = () => {
const { serverUrl } = useAppState();
const [searchParams, setSearchParams] = useSearchParams();
const [data, setData] = useState<Map<string, string>>(new Map());
const [loading, setLoading] = useState(false);
const [metricsError, setMetricsError] = useState<ErrorTypes | string>();
const [flagsError, setFlagsError] = useState<ErrorTypes | string>();
const [error, setError] = useState<ErrorTypes | string>();
const fetchData = useCallback(async (flags: string, metrics: string) => {
setMetricsError(metrics ? "" : "metrics are required");
setFlagsError(flags ? "" : "flags are required");
if (!metrics || !flags) return;
searchParams.set("flags", flags);
searchParams.set("metrics", metrics);
setSearchParams(searchParams);
const fetchUrl = getRetentionFiltersDebug(serverUrl, flags, metrics);
setLoading(true);
try {
const response = await fetch(fetchUrl);
const resp = await response.json();
setData(new Map(Object.entries(resp.result || {})));
setMetricsError(resp.error?.metrics || "");
setFlagsError(resp.error?.flags || "");
setError("");
} catch (e) {
if (e instanceof Error && e.name !== "AbortError") {
setError(`${e.name}: ${e.message}`);
}
}
setLoading(false);
}, [serverUrl]);
return {
data,
error,
metricsError,
flagsError,
loading,
applyFilters: fetchData
};
};

View file

@ -0,0 +1,137 @@
import React, { FC, useEffect } from "preact/compat";
import "./style.scss";
import TextField from "../../components/Main/TextField/TextField";
import { useCallback, useState } from "react";
import Button from "../../components/Main/Button/Button";
import { PlayIcon, WikiIcon } from "../../components/Main/Icons";
import { useDebugRetentionFilters } from "./hooks/useDebugRetentionFilters";
import Spinner from "../../components/Main/Spinner/Spinner";
import { useSearchParams } from "react-router-dom";
const example = {
flags: `-retentionPeriod=1y
-retentionFilters={env!="prod"}:2w
`,
metrics: `up
up{env="dev"}
up{env="prod"}`,
};
const RetentionFilters: FC = () => {
const [searchParams] = useSearchParams();
const { data, loading, error, metricsError, flagsError, applyFilters } = useDebugRetentionFilters();
const [metrics, setMetrics] = useState(searchParams.get("metrics") || "");
const [flags, setFlags] = useState(searchParams.get("flags") || "");
const handleMetricsChangeInput = useCallback((val: string) => {
setMetrics(val);
}, [setMetrics]);
const handleFlagsChangeInput = useCallback((val: string) => {
setFlags(val);
}, [setFlags]);
const handleApplyFilters = useCallback(() => {
applyFilters(flags, metrics);
}, [applyFilters, flags, metrics]);
const handleRunExample = useCallback(() => {
const { flags, metrics } = example;
setFlags(flags);
setMetrics(metrics);
// applyFilters already persists flags/metrics to the URL search params,
// so there is no need to mutate searchParams here.
applyFilters(flags, metrics);
}, [applyFilters]);
useEffect(() => {
if (flags && metrics) handleApplyFilters();
}, []);
const rows = [];
for (const [key, value] of data) {
rows.push(<tr className="vm-table__row" key={key}>
<td className="vm-table-cell">{key}</td>
<td className="vm-table-cell">{value}</td>
</tr>);
}
return (
<section className="vm-retention-filters">
{loading && <Spinner/>}
<div className="vm-retention-filters-body vm-block">
<div className="vm-retention-filters-body__expr">
<div className="vm-retention-filters-body__title">
<p>Provide a list of flags for retention configuration. Note that
only <code>-retentionPeriod</code> and <code>-retentionFilters</code> flags are
supported.</p>
</div>
<TextField
type="textarea"
label="Flags"
value={flags}
error={error || flagsError}
autofocus
onEnter={handleApplyFilters}
onChange={handleFlagsChangeInput}
placeholder={"-retentionPeriod=4w -retentionFilters=up{env=\"dev\"}:2w"}
/>
</div>
<div className="vm-retention-filters-body__expr">
<div className="vm-retention-filters-body__title">
<p>Provide a list of metrics to check retention configuration.</p>
</div>
<TextField
type="textarea"
label="Metrics"
value={metrics}
error={error || metricsError}
onEnter={handleApplyFilters}
onChange={handleMetricsChangeInput}
placeholder={"up{env=\"dev\"}\nup{env=\"prod\"}\n"}
/>
</div>
<div className="vm-retention-filters-body__result">
<table className="vm-table">
<thead className="vm-table-header">
<tr>
<th className="vm-table-cell vm-table-cell_header">Metric</th>
<th className="vm-table-cell vm-table-cell_header">Applied retention</th>
</tr>
</thead>
<tbody className="vm-table-body">
{rows}
</tbody>
</table>
</div>
<div className="vm-retention-filters-body-top">
<a
className="vm-link vm-link_with-icon"
target="_blank"
href="https://docs.victoriametrics.com/#retention-filters"
rel="help noreferrer"
>
<WikiIcon/>
Documentation
</a>
<Button
variant="text"
onClick={handleRunExample}
>
Try example
</Button>
<Button
variant="contained"
onClick={handleApplyFilters}
startIcon={<PlayIcon/>}
>
Apply
</Button>
</div>
</div>
</section>
);
};
export default RetentionFilters;

View file

@ -0,0 +1,46 @@
@use "src/styles/variables" as *;
.vm-retention-filters {
display: grid;
gap: $padding-medium;
&-body {
display: grid;
gap: $padding-global;
align-items: flex-start;
width: 100%;
&__title {
margin-bottom: $padding-medium;
}
&-top {
display: flex;
gap: $padding-small;
align-items: center;
justify-content: flex-end;
}
&__expr textarea {
min-height: 200px;
}
&__result textarea {
min-height: 60px;
}
code {
background-color: var(--color-hover-black);
border-radius: 6px;
font-size: 85%;
padding: .2em .4em;
}
textarea {
font-family: $font-family-monospace;
overflow: auto;
width: 100%;
height: 100%;
}
}
}

View file

@@ -15,6 +15,8 @@ const router = {
   icons: "/icons",
   anomaly: "/anomaly",
   query: "/query",
+  downsamplingDebug: "/downsampling-filters-debug",
+  retentionDebug: "/retention-filters-debug",
 };

 export interface RouterOptionsHeader {
@@ -108,6 +110,14 @@ export const routerOptions: {[key: string]: RouterOptions} = {
   [router.query]: {
     title: "Query",
     ...routerOptionsDefault
+  },
+  [router.downsamplingDebug]: {
+    title: "Downsampling filters debug",
+    header: {}
+  },
+  [router.retentionDebug]: {
+    title: "Retention filters debug",
+    header: {}
   }
 };

View file

@ -0,0 +1,92 @@
import router, { routerOptions } from "./index";
export enum NavigationItemType {
internalLink,
externalLink,
}
export interface NavigationItem {
label?: string,
value?: string,
hide?: boolean
submenu?: NavigationItem[],
type?: NavigationItemType,
}
interface NavigationConfig {
serverUrl: string,
isEnterpriseLicense: boolean,
showPredefinedDashboards: boolean,
showAlertLink: boolean,
}
/**
* Special case for alert link
*/
const getAlertLink = (url: string, showAlertLink: boolean) => {
// see more https://docs.victoriametrics.com/cluster-victoriametrics/?highlight=vmalertproxyurl#vmalert
return {
label: "Alerts",
value: `${url}/vmalert`,
type: NavigationItemType.externalLink,
hide: !showAlertLink,
};
};
/**
* Submenu for Tools tab
*/
const getToolsNav = (isEnterpriseLicense: boolean) => [
{ value: router.trace },
{ value: router.queryAnalyzer },
{ value: router.withTemplate },
{ value: router.relabel },
{ value: router.downsamplingDebug, hide: !isEnterpriseLicense },
{ value: router.retentionDebug, hide: !isEnterpriseLicense },
];
/**
* Submenu for Explore tab
*/
const getExploreNav = () => [
{ value: router.metrics },
{ value: router.cardinality },
{ value: router.topQueries },
{ value: router.activeQueries },
];
/**
* Default navigation menu
*/
export const getDefaultNavigation = ({
serverUrl,
isEnterpriseLicense,
showPredefinedDashboards,
showAlertLink,
}: NavigationConfig): NavigationItem[] => [
{ value: router.home },
{ label: "Explore", submenu: getExploreNav() },
{ label: "Tools", submenu: getToolsNav(isEnterpriseLicense) },
{ value: router.dashboards, hide: !showPredefinedDashboards },
getAlertLink(serverUrl, showAlertLink),
];
/**
* VictoriaLogs navigation menu
*/
export const getLogsNavigation = (): NavigationItem[] => [
{
label: routerOptions[router.logs].title,
value: router.home,
},
];
/**
* vmanomaly navigation menu
*/
export const getAnomalyNavigation = (): NavigationItem[] => [
{
label: routerOptions[router.anomaly].title,
value: router.home,
},
];

View file

@ -0,0 +1,43 @@
import { getAppModeEnable } from "../utils/app-mode";
import { useDashboardsState } from "../state/dashboards/DashboardsStateContext";
import { useAppState } from "../state/common/StateContext";
import { useMemo } from "preact/compat";
import { AppType } from "../types/appType";
import { processNavigationItems } from "./utils";
import { getAnomalyNavigation, getDefaultNavigation, getLogsNavigation } from "./navigation";
const appType = process.env.REACT_APP_TYPE;
const useNavigationMenu = () => {
const appModeEnable = getAppModeEnable();
const { dashboardsSettings } = useDashboardsState();
const { serverUrl, flags, appConfig } = useAppState();
const isEnterpriseLicense = appConfig.license?.type === "enterprise";
const showAlertLink = Boolean(flags["vmalert.proxyURL"]);
const showPredefinedDashboards = Boolean(!appModeEnable && dashboardsSettings.length);
const navigationConfig = useMemo(() => ({
serverUrl,
isEnterpriseLicense,
showAlertLink,
showPredefinedDashboards
}), [serverUrl, isEnterpriseLicense, showAlertLink, showPredefinedDashboards]);
const menu = useMemo(() => {
switch (appType) {
case AppType.logs:
return getLogsNavigation();
case AppType.anomaly:
return getAnomalyNavigation();
default:
return getDefaultNavigation(navigationConfig);
}
}, [navigationConfig]);
return processNavigationItems(menu);
};
export default useNavigationMenu;

View file

@ -0,0 +1,30 @@
import { routerOptions } from "./index";
import { NavigationItem } from "./navigation";
const routePathToTitle = (path: string): string => {
try {
return path
.replace(/^\/+/, "") // Remove leading slashes
.replace(/-/g, " ") // Replace hyphens with spaces
.trim() // Trim whitespace from both ends
.replace(/^\w/, (c) => c.toUpperCase()); // Capitalize the first character
} catch (e) {
return path;
}
};
export const processNavigationItems = (items: NavigationItem[]): NavigationItem[] => {
return items.filter((item) => !item.hide).map((item) => {
const newItem: NavigationItem = { ...item };
if (newItem.value && !newItem.label) {
newItem.label = routerOptions[newItem.value]?.title || routePathToTitle(newItem.value);
}
if (newItem.submenu && newItem.submenu.length > 0) {
newItem.submenu = processNavigationItems(newItem.submenu);
}
return newItem;
});
};
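For reference, an illustrative input/output for processNavigationItems (the routes below are hypothetical, not from this diff): hidden items are dropped, labels are filled in from routerOptions titles when present, and otherwise derived from the path by routePathToTitle.

// Illustrative only:
const items: NavigationItem[] = [
  { value: "/custom-page" },                             // label becomes "Custom page"
  { value: "/secret-page", hide: true },                 // filtered out
  { label: "Tools", submenu: [{ value: "/relabel" }] },  // submenu processed recursively
];

const menu = processNavigationItems(items);
// menu[0].label === "Custom page" (via routePathToTitle, since
// routerOptions has no title for this hypothetical path)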

View file

@@ -1,7 +1,7 @@
 import { getDefaultServer } from "../../utils/default-server-url";
 import { getQueryStringValue } from "../../utils/query-string";
 import { getFromStorage, saveToStorage } from "../../utils/storage";
-import { Theme } from "../../types";
+import { AppConfig, Theme } from "../../types";
 import { isDarkTheme } from "../../utils/theme";
 import { removeTrailingSlash } from "../../utils/url";
@@ -11,6 +11,7 @@ export interface AppState {
   theme: Theme;
   isDarkTheme: boolean | null;
   flags: Record<string, string | null>;
+  appConfig: AppConfig
 }

 export type Action =
@@ -18,6 +19,7 @@ export type Action =
   | { type: "SET_THEME", payload: Theme }
   | { type: "SET_TENANT_ID", payload: string }
   | { type: "SET_FLAGS", payload: Record<string, string | null> }
+  | { type: "SET_APP_CONFIG", payload: AppConfig }
   | { type: "SET_DARK_THEME" }

 const tenantId = getQueryStringValue("g0.tenantID", "") as string;
@@ -28,6 +30,7 @@ export const initialState: AppState = {
   theme: (getFromStorage("THEME") || Theme.system) as Theme,
   isDarkTheme: null,
   flags: {},
+  appConfig: {}
 };

 export function reducer(state: AppState, action: Action): AppState {
@@ -58,6 +61,11 @@ export function reducer(state: AppState, action: Action): AppState {
         ...state,
         flags: action.payload
       };
+    case "SET_APP_CONFIG":
+      return {
+        ...state,
+        appConfig: action.payload
+      };
     default:
       throw new Error();
   }

View file

@@ -165,3 +165,9 @@ export enum QueryContextType {
   label = "label",
   labelValue = "labelValue",
 }
+
+export interface AppConfig {
+  license?: {
+    type?: "enterprise" | "opensource";
+  }
+}
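The AppConfig shape above is what useFetchAppConfig expects to find in ./config.json. A plausible payload (the actual file is server-provided and not part of this diff):

const exampleConfig: AppConfig = {
  license: {
    // "enterprise" is the value useNavigationMenu checks to reveal the
    // downsampling/retention filters debug pages
    type: "enterprise",
  },
};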

View file

@@ -1,4 +1,16 @@
+import { TimeParams } from "../types";
+import dayjs from "dayjs";
+import { LOGS_BARS_VIEW } from "../constants/logs";
+
 export const getStreamPairs = (value: string): string[] => {
   const pairs = /^{.+}$/.test(value) ? value.slice(1, -1).split(",") : [value];
   return pairs.filter(Boolean);
 };
+
+export const getHitsTimeParams = (period: TimeParams) => {
+  const start = dayjs(period.start * 1000);
+  const end = dayjs(period.end * 1000);
+  const totalSeconds = end.diff(start, "milliseconds");
+  const step = Math.ceil(totalSeconds / LOGS_BARS_VIEW) || 1;
+
+  return { start, end, step };
+};
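This helper centralizes the bar-chart bucketing previously inlined in useFetchLogHits. Note that despite its name, `totalSeconds` holds milliseconds (the dayjs diff is taken in "milliseconds"), so `step` is the width of one bar in milliseconds. A worked example, assuming LOGS_BARS_VIEW were 100 (its real value lives in constants/logs and is not shown in this diff):

// a 1-hour period => 3_600_000 ms; with 100 bars each step is 36_000 ms,
// i.e. one 36-second bucket per bar
const totalMs = 60 * 60 * 1000;
const step = Math.ceil(totalMs / 100) || 1; // 36_000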

View file

@@ -137,14 +137,14 @@ export const barDisp = (stroke: Stroke, fill: Fill): Disp => {

 export const delSeries = (u: uPlot) => {
   for (let i = u.series.length - 1; i >= 0; i--) {
-    u.delSeries(i);
+    i && u.delSeries(i);
   }
 };

 export const addSeries = (u: uPlot, series: uPlotSeries[], spanGaps = false) => {
-  series.forEach((s) => {
+  series.forEach((s, i) => {
     if (s.label) s.spanGaps = spanGaps;
-    u.addSeries(s);
+    i && u.addSeries(s);
   });
 };
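Note on the `i &&` guards: in uPlot, series index 0 is the x (time) series, so delSeries/addSeries now leave it untouched and only recreate the data series when a chart is rebuilt; deleting and re-adding series 0 would otherwise drop the time scale itself.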

View file

@@ -6200,7 +6200,7 @@
         "type": "prometheus",
         "uid": "$ds"
       },
-      "description": "How many datapoints are in RAM queue waiting to be written into storage. The number of pending data points should be in the range from 0 to `2*<ingestion_rate>`, since VictoriaMetrics pushes pending data to persistent storage every second. The index datapoints value in general is much lower.",
+      "description": "How many datapoints are in RAM queue waiting to be written into storage. The number of pending data points should be in the range from 0 to `3*<ingestion_rate>`, since VictoriaMetrics pushes pending data to persistent storage every two seconds. The index datapoints value in general is much lower.",
       "fieldConfig": {
         "defaults": {
           "color": {

Some files were not shown because too many files have changed in this diff.