Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git, synced 2025-01-30 15:22:07 +00:00

Merge branch 'public-single-node' into pmm-6401-read-prometheus-data-files

Commit a092df3f84: 68 changed files with 1489 additions and 255 deletions
@@ -2503,4 +2503,6 @@ Pass `-help` to VictoriaMetrics in order to see the list of supported command-line flags
    Show VictoriaMetrics version
  -vmalert.proxyURL string
    Optional URL for proxying requests to vmalert. For example, if -vmalert.proxyURL=http://vmalert:8880 , then alerting API requests such as /api/v1/rules from Grafana will be proxied to http://vmalert:8880/api/v1/rules
  -vmui.customDashboardsPath string
    Optional path to vmui dashboards. See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards
```
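For illustration, both of the new flags documented above could be combined when starting single-node VictoriaMetrics; the proxy address matches the example in the flag description, while the dashboards path is only a placeholder:

```
./victoria-metrics \
  -vmalert.proxyURL=http://vmalert:8880 \
  -vmui.customDashboardsPath=/path/to/your/dashboards
```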
app/vmctl/remote_read_test.go (new file, 319 lines)
@@ -0,0 +1,319 @@
package main

import (
  "context"
  "testing"
  "time"

  "github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/remoteread"
  "github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/stepper"
  "github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/testdata/servers_integration_test"
  "github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/vm"
  "github.com/prometheus/prometheus/prompb"
)

func TestRemoteRead(t *testing.T) {

  var testCases = []struct {
    name             string
    remoteReadConfig remoteread.Config
    vmCfg            vm.Config
    start            string
    end              string
    numOfSamples     int64
    numOfSeries      int64
    rrp              remoteReadProcessor
    chunk            string
    remoteReadSeries func(start, end, numOfSeries, numOfSamples int64) []*prompb.TimeSeries
    expectedSeries   []vm.TimeSeries
  }{
    {
      name:             "step minute on minute time range",
      remoteReadConfig: remoteread.Config{Addr: "", LabelName: "__name__", LabelValue: ".*"},
      vmCfg:            vm.Config{Addr: "", Concurrency: 1, DisableProgressBar: true},
      start:            "2022-11-26T11:23:05+02:00",
      end:              "2022-11-26T11:24:05+02:00",
      numOfSamples:     2,
      numOfSeries:      3,
      chunk:            stepper.StepMinute,
      remoteReadSeries: remote_read_integration.GenerateRemoteReadSeries,
      expectedSeries: []vm.TimeSeries{
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
          Timestamps: []int64{1669454585000, 1669454615000},
          Values:     []float64{0, 0},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
          Timestamps: []int64{1669454585000, 1669454615000},
          Values:     []float64{100, 100},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
          Timestamps: []int64{1669454585000, 1669454615000},
          Values:     []float64{200, 200},
        },
      },
    },
    {
      name:             "step month on month time range",
      remoteReadConfig: remoteread.Config{Addr: "", LabelName: "__name__", LabelValue: ".*"},
      vmCfg:            vm.Config{Addr: "", Concurrency: 1, DisableProgressBar: true},
      start:            "2022-09-26T11:23:05+02:00",
      end:              "2022-11-26T11:24:05+02:00",
      numOfSamples:     2,
      numOfSeries:      3,
      chunk:            stepper.StepMonth,
      remoteReadSeries: remote_read_integration.GenerateRemoteReadSeries,
      expectedSeries: []vm.TimeSeries{
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
          Timestamps: []int64{1664184185000},
          Values:     []float64{0},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
          Timestamps: []int64{1664184185000},
          Values:     []float64{100},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
          Timestamps: []int64{1664184185000},
          Values:     []float64{200},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
          Timestamps: []int64{1666819415000},
          Values:     []float64{0},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
          Timestamps: []int64{1666819415000},
          Values:     []float64{100},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
          Timestamps: []int64{1666819415000},
          Values:     []float64{200}},
      },
    },
  }

  for _, tt := range testCases {
    t.Run(tt.name, func(t *testing.T) {
      remoteReadServer := remote_read_integration.NewRemoteReadServer(t)
      defer remoteReadServer.Close()
      remoteWriteServer := remote_read_integration.NewRemoteWriteServer(t)
      defer remoteWriteServer.Close()

      tt.remoteReadConfig.Addr = remoteReadServer.URL()

      rr, err := remoteread.NewClient(tt.remoteReadConfig)
      if err != nil {
        t.Fatalf("error create remote read client: %s", err)
      }

      start, err := time.Parse(time.RFC3339, tt.start)
      if err != nil {
        t.Fatalf("Error parse start time: %s", err)
      }

      end, err := time.Parse(time.RFC3339, tt.end)
      if err != nil {
        t.Fatalf("Error parse end time: %s", err)
      }

      rrs := tt.remoteReadSeries(start.Unix(), end.Unix(), tt.numOfSeries, tt.numOfSamples)

      remoteReadServer.SetRemoteReadSeries(rrs)
      remoteWriteServer.ExpectedSeries(tt.expectedSeries)

      tt.vmCfg.Addr = remoteWriteServer.URL()

      importer, err := vm.NewImporter(tt.vmCfg)
      if err != nil {
        t.Fatalf("failed to create VM importer: %s", err)
      }
      defer importer.Close()

      rmp := remoteReadProcessor{
        src: rr,
        dst: importer,
        filter: remoteReadFilter{
          timeStart: &start,
          timeEnd:   &end,
          chunk:     tt.chunk,
        },
        cc: 1,
      }

      ctx := context.Background()
      err = rmp.run(ctx, true, false)
      if err != nil {
        t.Fatalf("failed to run remote read processor: %s", err)
      }
    })
  }
}

func TestSteamRemoteRead(t *testing.T) {

  var testCases = []struct {
    name             string
    remoteReadConfig remoteread.Config
    vmCfg            vm.Config
    start            string
    end              string
    numOfSamples     int64
    numOfSeries      int64
    rrp              remoteReadProcessor
    chunk            string
    remoteReadSeries func(start, end, numOfSeries, numOfSamples int64) []*prompb.TimeSeries
    expectedSeries   []vm.TimeSeries
  }{
    {
      name:             "step minute on minute time range",
      remoteReadConfig: remoteread.Config{Addr: "", LabelName: "__name__", LabelValue: ".*", UseStream: true},
      vmCfg:            vm.Config{Addr: "", Concurrency: 1, DisableProgressBar: true},
      start:            "2022-11-26T11:23:05+02:00",
      end:              "2022-11-26T11:24:05+02:00",
      numOfSamples:     2,
      numOfSeries:      3,
      chunk:            stepper.StepMinute,
      remoteReadSeries: remote_read_integration.GenerateRemoteReadSeries,
      expectedSeries: []vm.TimeSeries{
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
          Timestamps: []int64{1669454585000, 1669454615000},
          Values:     []float64{0, 0},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
          Timestamps: []int64{1669454585000, 1669454615000},
          Values:     []float64{100, 100},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
          Timestamps: []int64{1669454585000, 1669454615000},
          Values:     []float64{200, 200},
        },
      },
    },
    {
      name:             "step month on month time range",
      remoteReadConfig: remoteread.Config{Addr: "", LabelName: "__name__", LabelValue: ".*", UseStream: true},
      vmCfg:            vm.Config{Addr: "", Concurrency: 1, DisableProgressBar: true},
      start:            "2022-09-26T11:23:05+02:00",
      end:              "2022-11-26T11:24:05+02:00",
      numOfSamples:     2,
      numOfSeries:      3,
      chunk:            stepper.StepMonth,
      remoteReadSeries: remote_read_integration.GenerateRemoteReadSeries,
      expectedSeries: []vm.TimeSeries{
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
          Timestamps: []int64{1664184185000},
          Values:     []float64{0},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
          Timestamps: []int64{1664184185000},
          Values:     []float64{100},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
          Timestamps: []int64{1664184185000},
          Values:     []float64{200},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "0"}},
          Timestamps: []int64{1666819415000},
          Values:     []float64{0},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "1"}},
          Timestamps: []int64{1666819415000},
          Values:     []float64{100},
        },
        {
          Name:       "vm_metric_1",
          LabelPairs: []vm.LabelPair{{Name: "job", Value: "2"}},
          Timestamps: []int64{1666819415000},
          Values:     []float64{200}},
      },
    },
  }

  for _, tt := range testCases {
    t.Run(tt.name, func(t *testing.T) {
      remoteReadServer := remote_read_integration.NewRemoteReadStreamServer(t)
      defer remoteReadServer.Close()
      remoteWriteServer := remote_read_integration.NewRemoteWriteServer(t)
      defer remoteWriteServer.Close()

      tt.remoteReadConfig.Addr = remoteReadServer.URL()

      rr, err := remoteread.NewClient(tt.remoteReadConfig)
      if err != nil {
        t.Fatalf("error create remote read client: %s", err)
      }

      start, err := time.Parse(time.RFC3339, tt.start)
      if err != nil {
        t.Fatalf("Error parse start time: %s", err)
      }

      end, err := time.Parse(time.RFC3339, tt.end)
      if err != nil {
        t.Fatalf("Error parse end time: %s", err)
      }

      rrs := tt.remoteReadSeries(start.Unix(), end.Unix(), tt.numOfSeries, tt.numOfSamples)

      remoteReadServer.InitMockStorage(rrs)
      remoteWriteServer.ExpectedSeries(tt.expectedSeries)

      tt.vmCfg.Addr = remoteWriteServer.URL()

      importer, err := vm.NewImporter(tt.vmCfg)
      if err != nil {
        t.Fatalf("failed to create VM importer: %s", err)
      }
      defer importer.Close()

      rmp := remoteReadProcessor{
        src: rr,
        dst: importer,
        filter: remoteReadFilter{
          timeStart: &start,
          timeEnd:   &end,
          chunk:     tt.chunk,
        },
        cc: 1,
      }

      ctx := context.Background()
      err = rmp.run(ctx, true, false)
      if err != nil {
        t.Fatalf("failed to run remote read processor: %s", err)
      }
    })
  }
}
app/vmctl/testdata/servers_integration_test/remote_read_server.go (new file, vendored, 366 lines)
@@ -0,0 +1,366 @@
package remote_read_integration

import (
  "context"
  "fmt"
  "io"
  "net/http"
  "net/http/httptest"
  "strconv"
  "strings"
  "testing"

  "github.com/gogo/protobuf/proto"
  "github.com/golang/snappy"
  "github.com/prometheus/prometheus/model/labels"
  "github.com/prometheus/prometheus/prompb"
  "github.com/prometheus/prometheus/storage/remote"
)

const (
  maxBytesInFrame = 1024 * 1024
)

type RemoteReadServer struct {
  server  *httptest.Server
  series  []*prompb.TimeSeries
  storage *MockStorage
}

// NewRemoteReadServer creates a remote read server. It exposes a single endpoint and responds with the
// passed series based on the request to the read endpoint. It returns a server which should be closed after
// being used.
func NewRemoteReadServer(t *testing.T) *RemoteReadServer {
  rrs := &RemoteReadServer{
    series: make([]*prompb.TimeSeries, 0),
  }
  rrs.server = httptest.NewServer(rrs.getReadHandler(t))
  return rrs
}

// Close closes the server.
func (rrs *RemoteReadServer) Close() {
  rrs.server.Close()
}

func (rrs *RemoteReadServer) URL() string {
  return rrs.server.URL
}

func (rrs *RemoteReadServer) SetRemoteReadSeries(series []*prompb.TimeSeries) {
  rrs.series = append(rrs.series, series...)
}

func (rrs *RemoteReadServer) getReadHandler(t *testing.T) http.Handler {
  return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    if !validateReadHeaders(t, r) {
      t.Fatalf("invalid read headers")
    }

    compressed, err := io.ReadAll(r.Body)
    if err != nil {
      t.Fatalf("error read body: %s", err)
    }

    reqBuf, err := snappy.Decode(nil, compressed)
    if err != nil {
      t.Fatalf("error decode compressed data:%s", err)
    }

    var req prompb.ReadRequest
    if err := proto.Unmarshal(reqBuf, &req); err != nil {
      t.Fatalf("error unmarshal read request: %s", err)
    }

    resp := &prompb.ReadResponse{
      Results: make([]*prompb.QueryResult, len(req.Queries)),
    }

    for i, r := range req.Queries {
      startTs := r.StartTimestampMs
      endTs := r.EndTimestampMs
      ts := make([]*prompb.TimeSeries, len(rrs.series))
      for i, s := range rrs.series {
        var samples []prompb.Sample
        for _, sample := range s.Samples {
          if sample.Timestamp >= startTs && sample.Timestamp < endTs {
            samples = append(samples, sample)
          }
        }
        var series prompb.TimeSeries
        if len(samples) > 0 {
          series.Labels = s.Labels
          series.Samples = samples
        }
        ts[i] = &series
      }

      resp.Results[i] = &prompb.QueryResult{Timeseries: ts}
      data, err := proto.Marshal(resp)
      if err != nil {
        t.Fatalf("error marshal response: %s", err)
      }

      compressed = snappy.Encode(nil, data)

      w.Header().Set("Content-Type", "application/x-protobuf")
      w.Header().Set("Content-Encoding", "snappy")
      w.WriteHeader(http.StatusOK)

      if _, err := w.Write(compressed); err != nil {
        t.Fatalf("snappy encode error: %s", err)
      }
    }
  })
}

func NewRemoteReadStreamServer(t *testing.T) *RemoteReadServer {
  rrs := &RemoteReadServer{
    series: make([]*prompb.TimeSeries, 0),
  }
  rrs.server = httptest.NewServer(rrs.getStreamReadHandler(t))
  return rrs
}

func (rrs *RemoteReadServer) InitMockStorage(series []*prompb.TimeSeries) {
  rrs.storage = NewMockStorage(series)
}

func (rrs *RemoteReadServer) getStreamReadHandler(t *testing.T) http.Handler {
  return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    if !validateStreamReadHeaders(t, r) {
      t.Fatalf("invalid read headers")
    }

    f, ok := w.(http.Flusher)
    if !ok {
      t.Fatalf("internal http.ResponseWriter does not implement http.Flusher interface")
    }

    stream := remote.NewChunkedWriter(w, f)

    data, err := io.ReadAll(r.Body)
    if err != nil {
      t.Fatalf("error read body: %s", err)
    }

    decodedData, err := snappy.Decode(nil, data)
    if err != nil {
      t.Fatalf("error decode compressed data:%s", err)
    }

    var req prompb.ReadRequest
    if err := proto.Unmarshal(decodedData, &req); err != nil {
      t.Fatalf("error unmarshal read request: %s", err)
    }

    var chunks []prompb.Chunk
    ctx := context.Background()
    for idx, r := range req.Queries {
      startTs := r.StartTimestampMs
      endTs := r.EndTimestampMs

      var matchers []*labels.Matcher
      cb := func() (int64, error) { return 0, nil }

      c := remote.NewSampleAndChunkQueryableClient(rrs.storage, nil, matchers, true, cb)

      q, err := c.ChunkQuerier(ctx, startTs, endTs)
      if err != nil {
        t.Fatalf("error init chunk querier: %s", err)
      }

      ss := q.Select(false, nil, matchers...)
      for ss.Next() {
        series := ss.At()
        iter := series.Iterator()
        labels := remote.MergeLabels(labelsToLabelsProto(series.Labels()), nil)

        frameBytesLeft := maxBytesInFrame
        for _, lb := range labels {
          frameBytesLeft -= lb.Size()
        }

        isNext := iter.Next()

        for isNext {
          chunk := iter.At()

          if chunk.Chunk == nil {
            t.Fatalf("error found not populated chunk returned by SeriesSet at ref: %v", chunk.Ref)
          }

          chunks = append(chunks, prompb.Chunk{
            MinTimeMs: chunk.MinTime,
            MaxTimeMs: chunk.MaxTime,
            Type:      prompb.Chunk_Encoding(chunk.Chunk.Encoding()),
            Data:      chunk.Chunk.Bytes(),
          })

          frameBytesLeft -= chunks[len(chunks)-1].Size()

          // We are fine with minor inaccuracy of max bytes per frame. The inaccuracy will be max of full chunk size.
          isNext = iter.Next()
          if frameBytesLeft > 0 && isNext {
            continue
          }

          resp := &prompb.ChunkedReadResponse{
            ChunkedSeries: []*prompb.ChunkedSeries{
              {Labels: labels, Chunks: chunks},
            },
            QueryIndex: int64(idx),
          }

          b, err := proto.Marshal(resp)
          if err != nil {
            t.Fatalf("error marshal response: %s", err)
          }

          if _, err := stream.Write(b); err != nil {
            t.Fatalf("error write to stream: %s", err)
          }
          chunks = chunks[:0]
          rrs.storage.Reset()
        }
        if err := iter.Err(); err != nil {
          t.Fatalf("error iterate over chunk series: %s", err)
        }
      }
    }
  })
}

func validateReadHeaders(t *testing.T, r *http.Request) bool {
  if r.Method != http.MethodPost {
    t.Fatalf("got %q method, expected %q", r.Method, http.MethodPost)
  }
  if r.Header.Get("Content-Encoding") != "snappy" {
    t.Fatalf("got %q content encoding header, expected %q", r.Header.Get("Content-Encoding"), "snappy")
  }
  if r.Header.Get("Content-Type") != "application/x-protobuf" {
    t.Fatalf("got %q content type header, expected %q", r.Header.Get("Content-Type"), "application/x-protobuf")
  }

  remoteReadVersion := r.Header.Get("X-Prometheus-Remote-Read-Version")
  if remoteReadVersion == "" {
    t.Fatalf("got empty prometheus remote read header")
  }
  if !strings.HasPrefix(remoteReadVersion, "0.1.") {
    t.Fatalf("wrong remote version defined")
  }

  return true
}

func validateStreamReadHeaders(t *testing.T, r *http.Request) bool {
  if r.Method != http.MethodPost {
    t.Fatalf("got %q method, expected %q", r.Method, http.MethodPost)
  }
  if r.Header.Get("Content-Encoding") != "snappy" {
    t.Fatalf("got %q content encoding header, expected %q", r.Header.Get("Content-Encoding"), "snappy")
  }
  if r.Header.Get("Content-Type") != "application/x-streamed-protobuf; proto=prometheus.ChunkedReadResponse" {
    t.Fatalf("got %q content type header, expected %q", r.Header.Get("Content-Type"), "application/x-streamed-protobuf; proto=prometheus.ChunkedReadResponse")
  }

  remoteReadVersion := r.Header.Get("X-Prometheus-Remote-Read-Version")
  if remoteReadVersion == "" {
    t.Fatalf("got empty prometheus remote read header")
  }
  if !strings.HasPrefix(remoteReadVersion, "0.1.") {
    t.Fatalf("wrong remote version defined")
  }
  return true
}

func GenerateRemoteReadSeries(start, end, numOfSeries, numOfSamples int64) []*prompb.TimeSeries {
  var ts []*prompb.TimeSeries
  j := 0
  for i := 0; i < int(numOfSeries); i++ {
    if i%3 == 0 {
      j++
    }

    timeSeries := prompb.TimeSeries{
      Labels: []prompb.Label{
        {Name: labels.MetricName, Value: fmt.Sprintf("vm_metric_%d", j)},
        {Name: "job", Value: strconv.Itoa(i)},
      },
    }

    ts = append(ts, &timeSeries)
  }

  for i := range ts {
    ts[i].Samples = generateRemoteReadSamples(i, start, end, numOfSamples)
  }

  return ts
}

func generateRemoteReadSamples(idx int, startTime, endTime, numOfSamples int64) []prompb.Sample {
  samples := make([]prompb.Sample, 0)
  delta := (endTime - startTime) / numOfSamples

  t := startTime
  for t != endTime {
    v := 100 * int64(idx)
    samples = append(samples, prompb.Sample{
      Timestamp: t * 1000,
      Value:     float64(v),
    })
    t = t + delta
  }

  return samples
}

type MockStorage struct {
  query *prompb.Query
  store []*prompb.TimeSeries
}

func NewMockStorage(series []*prompb.TimeSeries) *MockStorage {
  return &MockStorage{store: series}
}

func (ms *MockStorage) Read(_ context.Context, query *prompb.Query) (*prompb.QueryResult, error) {
  if ms.query != nil {
    return nil, fmt.Errorf("expected only one call to remote client got: %v", query)
  }
  ms.query = query

  q := &prompb.QueryResult{Timeseries: make([]*prompb.TimeSeries, 0, len(ms.store))}
  for _, s := range ms.store {
    var samples []prompb.Sample
    for _, sample := range s.Samples {
      if sample.Timestamp >= query.StartTimestampMs && sample.Timestamp < query.EndTimestampMs {
        samples = append(samples, sample)
      }
    }
    var series prompb.TimeSeries
    if len(samples) > 0 {
      series.Labels = s.Labels
      series.Samples = samples
    }

    q.Timeseries = append(q.Timeseries, &series)
  }
  return q, nil
}

func (ms *MockStorage) Reset() {
  ms.query = nil
}

func labelsToLabelsProto(labels labels.Labels) []prompb.Label {
  result := make([]prompb.Label, 0, len(labels))
  for _, l := range labels {
    result = append(result, prompb.Label{
      Name:  l.Name,
      Value: l.Value,
    })
  }
  return result
}
app/vmctl/testdata/servers_integration_test/remote_write_server.go (new file, vendored, 86 lines)
@@ -0,0 +1,86 @@
package remote_read_integration

import (
  "bufio"
  "net/http"
  "net/http/httptest"
  "reflect"
  "testing"

  "github.com/VictoriaMetrics/VictoriaMetrics/app/vmctl/vm"
  parser "github.com/VictoriaMetrics/VictoriaMetrics/lib/protoparser/vmimport"
)

type RemoteWriteServer struct {
  server *httptest.Server
  series []vm.TimeSeries
}

// NewRemoteWriteServer prepares test remote write server
func NewRemoteWriteServer(t *testing.T) *RemoteWriteServer {
  rws := &RemoteWriteServer{series: make([]vm.TimeSeries, 0)}
  mux := http.NewServeMux()
  mux.Handle("/api/v1/import", rws.getWriteHandler(t))
  mux.Handle("/health", rws.handlePing())
  rws.server = httptest.NewServer(mux)
  return rws
}

// Close closes the server.
func (rws *RemoteWriteServer) Close() {
  rws.server.Close()
}

func (rws *RemoteWriteServer) ExpectedSeries(series []vm.TimeSeries) {
  rws.series = append(rws.series, series...)
}

func (rws *RemoteWriteServer) URL() string {
  return rws.server.URL
}

func (rws *RemoteWriteServer) getWriteHandler(t *testing.T) http.Handler {
  return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    var tss []vm.TimeSeries
    scanner := bufio.NewScanner(r.Body)
    var rows parser.Rows
    for scanner.Scan() {

      rows.Unmarshal(scanner.Text())
      for _, row := range rows.Rows {
        var labelPairs []vm.LabelPair
        var ts vm.TimeSeries
        nameValue := ""
        for _, tag := range row.Tags {
          if string(tag.Key) == "__name__" {
            nameValue = string(tag.Value)
            continue
          }
          labelPairs = append(labelPairs, vm.LabelPair{Name: string(tag.Key), Value: string(tag.Value)})
        }

        ts.Values = append(ts.Values, row.Values...)
        ts.Timestamps = append(ts.Timestamps, row.Timestamps...)
        ts.Name = nameValue
        ts.LabelPairs = labelPairs
        tss = append(tss, ts)
      }
      rows.Reset()
    }

    if !reflect.DeepEqual(tss, rws.series) {
      w.WriteHeader(http.StatusInternalServerError)
      t.Fatalf("datasets not equal, expected: %#v; \n got: %#v", rws.series, tss)
      return
    }

    w.WriteHeader(http.StatusNoContent)
  })
}

func (rws *RemoteWriteServer) handlePing() http.Handler {
  return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    w.WriteHeader(http.StatusOK)
    _, _ = w.Write([]byte("OK"))
  })
}
@@ -161,8 +161,10 @@ func RequestHandler(w http.ResponseWriter, r *http.Request) bool {
  case strings.HasPrefix(path, "/graphite/"):
    path = path[len("/graphite"):]
  }

  // vmui access.
  if path == "/vmui" || path == "/graph" {
  switch {
  case path == "/vmui" || path == "/graph":
    // VMUI access via incomplete url without `/` in the end. Redirect to complete url.
    // Use relative redirect, since the hostname and path prefix may be incorrect if VictoriaMetrics
    // is hidden behind vmauth or similar proxy.

@@ -171,18 +173,31 @@ func RequestHandler(w http.ResponseWriter, r *http.Request) bool {
    newURL := path + "/?" + r.Form.Encode()
    httpserver.Redirect(w, newURL)
    return true
  }
  if strings.HasPrefix(path, "/vmui/") {
  case strings.HasPrefix(path, "/vmui/"):
    if path == "/vmui/custom-dashboards" {
      if err := handleVMUICustomDashboards(w); err != nil {
        httpserver.Errorf(w, r, "%s", err)
        return true
      }
      return true
    }
    r.URL.Path = path
    vmuiFileServer.ServeHTTP(w, r)
    return true
  }
  if strings.HasPrefix(path, "/graph/") {
  case strings.HasPrefix(path, "/graph/"):
    // This is needed for serving /graph URLs from Prometheus datasource in Grafana.
    if path == "/graph/custom-dashboards" {
      if err := handleVMUICustomDashboards(w); err != nil {
        httpserver.Errorf(w, r, "%s", err)
        return true
      }
      return true
    }
    r.URL.Path = strings.Replace(path, "/graph/", "/vmui/", 1)
    vmuiFileServer.ServeHTTP(w, r)
    return true
  }

  if strings.HasPrefix(path, "/api/v1/label/") {
    s := path[len("/api/v1/label/"):]
    if strings.HasSuffix(s, "/values") {
app/vmselect/vmui.go (new file, 112 lines)
@@ -0,0 +1,112 @@
package vmselect

import (
  "encoding/json"
  "flag"
  "fmt"
  "net/http"
  "os"
  "path/filepath"

  "github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
  "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
)

var (
  vmuiCustomDashboardsPath = flag.String("vmui.customDashboardsPath", "", "Optional path to vmui dashboards. "+
    "See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards")
)

// dashboardSettings represents dashboard settings file struct.
//
// See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards
type dashboardSettings struct {
  Title    string         `json:"title,omitempty"`
  Filename string         `json:"filename,omitempty"`
  Rows     []dashboardRow `json:"rows"`
}

// panelSettings represents fields which used to show graph.
//
// See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards
type panelSettings struct {
  Title       string   `json:"title,omitempty"`
  Description string   `json:"description,omitempty"`
  Unit        string   `json:"unit,omitempty"`
  Expr        []string `json:"expr"`
  Alias       []string `json:"alias,omitempty"`
  ShowLegend  bool     `json:"showLegend,omitempty"`
  Width       int      `json:"width,omitempty"`
}

// dashboardRow represents panels on dashboard.
//
// See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards
type dashboardRow struct {
  Title  string          `json:"title,omitempty"`
  Panels []panelSettings `json:"panels"`
}

// dashboardsData represents all the dashboards settings.
type dashboardsData struct {
  DashboardsSettings []dashboardSettings `json:"dashboardsSettings"`
}

func handleVMUICustomDashboards(w http.ResponseWriter) error {
  path := *vmuiCustomDashboardsPath
  if path == "" {
    writeSuccessResponse(w, []byte(`{"dashboardsSettings": []}`))
    return nil
  }
  settings, err := collectDashboardsSettings(path)
  if err != nil {
    return fmt.Errorf("cannot collect dashboards settings by -vmui.customDashboardsPath=%q: %w", path, err)
  }
  writeSuccessResponse(w, settings)
  return nil
}

func writeSuccessResponse(w http.ResponseWriter, data []byte) {
  w.WriteHeader(http.StatusOK)
  w.Header().Set("Content-Type", "application/json")
  w.Write(data)
}

func collectDashboardsSettings(path string) ([]byte, error) {
  if !fs.IsPathExist(path) {
    return nil, fmt.Errorf("cannot find folder %q", path)
  }
  files, err := os.ReadDir(path)
  if err != nil {
    return nil, fmt.Errorf("cannot read folder %q", path)
  }

  var dss []dashboardSettings
  for _, file := range files {
    filename := file.Name()
    if err != nil {
      logger.Errorf("skipping %q at -vmui.customDashboardsPath=%q, since the info for this file cannot be obtained: %s", filename, path, err)
      continue
    }
    if filepath.Ext(filename) != ".json" {
      continue
    }
    filePath := filepath.Join(path, filename)
    f, err := os.ReadFile(filePath)
    if err != nil {
      // There is no need to add more context to the returned error, since os.ReadFile() adds enough context.
      return nil, err
    }
    var ds dashboardSettings
    err = json.Unmarshal(f, &ds)
    if err != nil {
      return nil, fmt.Errorf("cannot parse file %s: %w", filePath, err)
    }
    if len(ds.Rows) > 0 {
      dss = append(dss, ds)
    }
  }

  dd := dashboardsData{DashboardsSettings: dss}
  return json.Marshal(dd)
}
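As a quick sketch of how the handler above behaves, the new endpoint can be queried directly once the service is running; the port below assumes the default single-node listen address and is only an example:

```
curl http://localhost:8428/vmui/custom-dashboards
# with -vmui.customDashboardsPath unset, handleVMUICustomDashboards replies with:
# {"dashboardsSettings": []}
```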
@@ -1,12 +1,12 @@
{
  "files": {
    "main.css": "./static/css/main.e9e7cdb7.css",
    "main.js": "./static/js/main.d34bbb5e.js",
    "main.css": "./static/css/main.7672c15c.css",
    "main.js": "./static/js/main.84759f8d.js",
    "static/js/27.c1ccfd29.chunk.js": "./static/js/27.c1ccfd29.chunk.js",
    "index.html": "./index.html"
  },
  "entrypoints": [
    "static/css/main.e9e7cdb7.css",
    "static/js/main.d34bbb5e.js"
    "static/css/main.7672c15c.css",
    "static/js/main.84759f8d.js"
  ]
}
@@ -3,6 +3,24 @@
2. Import your config file into the `dashboards/index.js`
3. Add filename into the array `window.__VMUI_PREDEFINED_DASHBOARDS__`

It is possible to define the path to the predefined dashboards by setting `--vmui.customDashboardsPath`.

1. Single-node version
If you use the single-node version of VictoriaMetrics, pass this flag to the executable:
```
./victoria-metrics --vmui.customDashboardsPath=/path/to/your/dashboards
```

2. Cluster version
If you use the cluster version, define this flag for each `vmselect` component:
```
./vmselect -storageNode=:8418 --vmui.customDashboardsPath=/path/to/your/dashboards
```
In this case the predefined dashboard files should be placed next to each `vmselect`. For example,
if you have 3 `vmselect` instances, you should create 3 copies of your predefined dashboards.


### Configuration options

<br/>
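To make the expected file format concrete, below is a minimal sketch of a predefined dashboard JSON file; the titles and the query are invented for illustration and only mirror the fields accepted by the dashboard settings structs (`title`, `rows`, `panels`, `expr`, `unit`, `showLegend`):

```
{
  "title": "Example dashboard",
  "rows": [
    {
      "title": "Ingestion",
      "panels": [
        {
          "title": "Inserted rows",
          "description": "Example panel built from a single MetricsQL expression",
          "unit": "rows/s",
          "expr": ["sum(rate(vm_rows_inserted_total[5m]))"],
          "showLegend": true
        }
      ]
    }
  ]
}
```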
@@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="VM-UI is a metric explorer for Victoria Metrics"/><link rel="apple-touch-icon" href="./apple-touch-icon.png"/><link rel="icon" type="image/png" sizes="32x32" href="./favicon-32x32.png"><link rel="manifest" href="./manifest.json"/><title>VM UI</title><link rel="preconnect" href="https://fonts.googleapis.com"><link rel="preconnect" href="https://fonts.gstatic.com" crossorigin><link href="https://fonts.googleapis.com/css2?family=JetBrains+Mono&family=Lato:wght@300;400;700&display=swap" rel="stylesheet"><script src="./dashboards/index.js" type="module"></script><script defer="defer" src="./static/js/main.d34bbb5e.js"></script><link href="./static/css/main.e9e7cdb7.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="VM-UI is a metric explorer for Victoria Metrics"/><link rel="apple-touch-icon" href="./apple-touch-icon.png"/><link rel="icon" type="image/png" sizes="32x32" href="./favicon-32x32.png"><link rel="manifest" href="./manifest.json"/><title>VM UI</title><link rel="preconnect" href="https://fonts.googleapis.com"><link rel="preconnect" href="https://fonts.gstatic.com" crossorigin><link href="https://fonts.googleapis.com/css2?family=JetBrains+Mono&family=Lato:wght@300;400;700&display=swap" rel="stylesheet"><script src="./dashboards/index.js" type="module"></script><script defer="defer" src="./static/js/main.84759f8d.js"></script><link href="./static/css/main.7672c15c.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
File diff suppressed because one or more lines are too long

app/vmselect/vmui/static/js/main.84759f8d.js (new file, 2 lines)
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -3,6 +3,24 @@
2. Import your config file into the `dashboards/index.js`
3. Add filename into the array `window.__VMUI_PREDEFINED_DASHBOARDS__`

It is possible to define the path to the predefined dashboards by setting `--vmui.customDashboardsPath`.

1. Single-node version
If you use the single-node version of VictoriaMetrics, pass this flag to the executable:
```
./victoria-metrics --vmui.customDashboardsPath=/path/to/your/dashboards
```

2. Cluster version
If you use the cluster version, define this flag for each `vmselect` component:
```
./vmselect -storageNode=:8418 --vmui.customDashboardsPath=/path/to/your/dashboards
```
In this case the predefined dashboard files should be placed next to each `vmselect`. For example,
if you have 3 `vmselect` instances, you should create 3 copies of your predefined dashboards.


### Configuration options

<br/>
@@ -17,6 +17,7 @@ export interface ChartTooltipProps {
  u: uPlot,
  metrics: MetricResult[],
  series: Series[],
  yRange: number[];
  unit?: string,
  isSticky?: boolean,
  tooltipOffset: { left: number, top: number },

@@ -30,6 +31,7 @@ const ChartTooltip: FC<ChartTooltipProps> = ({
  unit = "",
  metrics,
  series,
  yRange,
  tooltipIdx,
  tooltipOffset,
  isSticky,

@@ -46,22 +48,24 @@ const ChartTooltip: FC<ChartTooltipProps> = ({

  const targetPortal = useMemo(() => u.root.querySelector(".u-wrap"), [u]);

  const value = useMemo(() => get(u, ["data", seriesIdx, dataIdx], 0), [u, seriesIdx, dataIdx]);
  const valueFormat = useMemo(() => formatPrettyNumber(value), [value]);
  const dataTime = useMemo(() => u.data[0][dataIdx], [u, dataIdx]);
  const date = useMemo(() => dayjs(dataTime * 1000).tz().format(DATE_FULL_TIMEZONE_FORMAT), [dataTime]);
  const value = get(u, ["data", seriesIdx, dataIdx], 0);
  const valueFormat = formatPrettyNumber(value, get(yRange, [0]), get(yRange, [1]));
  const dataTime = u.data[0][dataIdx];
  const date = dayjs(dataTime * 1000).tz().format(DATE_FULL_TIMEZONE_FORMAT);

  const color = useMemo(() => series[seriesIdx]?.stroke+"", [series, seriesIdx]);
  const color = series[seriesIdx]?.stroke+"";

  const name = useMemo(() => {
    const group = metrics[seriesIdx -1]?.group || 0;
    return `Query ${group}`;
  }, [series, seriesIdx]);
  const groups = new Set();
  metrics.forEach(m => groups.add(m.group));
  const groupsSize = groups.size;
  const group = metrics[seriesIdx-1]?.group || 0;

  const metric = metrics[seriesIdx-1]?.metric || {};
  const labelNames = Object.keys(metric).filter(x => x != "__name__");
  const metricName = metric["__name__"] || "value";

  const fields = useMemo(() => {
    const metric = metrics[seriesIdx - 1]?.metric || {};
    const fields = Object.keys(metric);
    return fields.map(key => `${key}=${JSON.stringify(metric[key])}`);
    return labelNames.map(key => `${key}=${JSON.stringify(metric[key])}`);
  }, [metrics, seriesIdx]);

  const handleClose = () => {

@@ -136,7 +140,12 @@ const ChartTooltip: FC<ChartTooltipProps> = ({
      style={position}
    >
      <div className="vm-chart-tooltip-header">
        <div className="vm-chart-tooltip-header__date">{date}</div>
        <div className="vm-chart-tooltip-header__date">
          {groupsSize > 1 && (
            <div>Query {group}</div>
          )}
          {date}
        </div>
        {isSticky && (
          <>
            <Button

@@ -162,7 +171,7 @@ const ChartTooltip: FC<ChartTooltipProps> = ({
            style={{ background: color }}
          />
          <p>
            {name}:
            {metricName}:
            <b className="vm-chart-tooltip-data__value">{valueFormat}</b>
            {unit}
          </p>
@@ -1,5 +1,5 @@
@use "src/styles/variables" as *;

$chart-tooltip-width: 300px;
$chart-tooltip-width: 325px;
$chart-tooltip-icon-width: 25px;
$chart-tooltip-half-icon: calc($chart-tooltip-icon-width/2);
$chart-tooltip-date-width: $chart-tooltip-width - (2*$chart-tooltip-icon-width) - (2*$padding-global) - $padding-small;
@@ -50,6 +50,7 @@ const LineChart: FC<LineChartProps> = ({
  const uPlotRef = useRef<HTMLDivElement>(null);
  const [isPanning, setPanning] = useState(false);
  const [xRange, setXRange] = useState({ min: period.start, max: period.end });
  const [yRange, setYRange] = useState([0, 1]);
  const [uPlotInst, setUPlotInst] = useState<uPlot>();
  const layoutSize = useResize(container);

@@ -128,6 +129,7 @@ const LineChart: FC<LineChartProps> = ({
    unit,
    series,
    metrics,
    yRange,
    tooltipIdx,
    tooltipOffset,
  };

@@ -153,7 +155,11 @@ const LineChart: FC<LineChartProps> = ({
  };

  const getRangeX = (): Range.MinMax => [xRange.min, xRange.max];

  const getRangeY = (u: uPlot, min = 0, max = 1, axis: string): Range.MinMax => {
    if (axis == "1") {
      setYRange([min, max]);
    }
    if (yaxis.limits.enable) return yaxis.limits.range[axis];
    return getMinMaxBuffer(min, max);
  };

@@ -258,6 +264,7 @@ const LineChart: FC<LineChartProps> = ({
          u={uPlotInst}
          series={series}
          metrics={metrics}
          yRange={yRange}
          tooltipIdx={tooltipIdx}
          tooltipOffset={tooltipOffset}
          id={tooltipId}
@@ -15,7 +15,6 @@ interface ExploreMetricItemGraphProps {
  instance: string,
  rateEnabled: boolean,
  isBucket: boolean,
  showLegend: boolean
  height?: number
}

@@ -25,7 +24,6 @@ const ExploreMetricItem: FC<ExploreMetricItemGraphProps> = ({
  instance,
  rateEnabled,
  isBucket,
  showLegend,
  height
}) => {
  const { customStep, yaxis } = useGraphState();

@@ -118,7 +116,7 @@ with (q = ${queryBase}) (
        yaxis={yaxis}
        setYaxisLimits={setYaxisLimits}
        setPeriod={setPeriod}
        showLegend={showLegend}
        showLegend={false}
        height={height}
      />
    )}
@@ -3,37 +3,24 @@ import ExploreMetricItemGraph from "../ExploreMetricGraph/ExploreMetricItemGraph";
import ExploreMetricItemHeader from "../ExploreMetricItemHeader/ExploreMetricItemHeader";
import "./style.scss";
import useResize from "../../../hooks/useResize";
import { GraphSize } from "../../../types";

interface ExploreMetricItemProps {
  name: string
  job: string
  instance: string
  index: number
  size: GraphSize
  onRemoveItem: (name: string) => void
  onChangeOrder: (name: string, oldIndex: number, newIndex: number) => void
}

export const sizeVariants = [
  {
    id: "small",
    height: () => window.innerHeight * 0.2
  },
  {
    id: "medium",
    isDefault: true,
    height: () => window.innerHeight * 0.4
  },
  {
    id: "large",
    height: () => window.innerHeight * 0.8
  },
];

const ExploreMetricItem: FC<ExploreMetricItemProps> = ({
  name,
  job,
  instance,
  index,
  size,
  onRemoveItem,
  onChangeOrder,
}) => {

@@ -42,17 +29,10 @@ const ExploreMetricItem: FC<ExploreMetricItemProps> = ({
  const isBucket = useMemo(() => /_bucket?/.test(name), [name]);

  const [rateEnabled, setRateEnabled] = useState(isCounter);
  const [showLegend, setShowLegend] = useState(false);
  const [size, setSize] = useState(sizeVariants.find(v => v.isDefault) || sizeVariants[0]);

  const windowSize = useResize(document.body);
  const graphHeight = useMemo(size.height, [size, windowSize]);

  const handleChangeSize = (id: string) => {
    const target = sizeVariants.find(variant => variant.id === id);
    if (target) setSize(target);
  };

  useEffect(() => {
    setRateEnabled(isCounter);
  }, [job]);

@@ -64,13 +44,10 @@ const ExploreMetricItem: FC<ExploreMetricItemProps> = ({
        index={index}
        isBucket={isBucket}
        rateEnabled={rateEnabled}
        showLegend={showLegend}
        size={size.id}
        onChangeRate={setRateEnabled}
        onChangeLegend={setShowLegend}
        onRemoveItem={onRemoveItem}
        onChangeOrder={onChangeOrder}
        onChangeSize={handleChangeSize}
      />
      <ExploreMetricItemGraph
        key={`${name}_${job}_${instance}_${rateEnabled}`}

@@ -79,7 +56,6 @@ const ExploreMetricItem: FC<ExploreMetricItemProps> = ({
        instance={instance}
        rateEnabled={rateEnabled}
        isBucket={isBucket}
        showLegend={showLegend}
        height={graphHeight}
      />
    </div>
@@ -1,24 +1,19 @@
import React, { FC, useRef, useState } from "preact/compat";
import React, { FC } from "preact/compat";
import "./style.scss";
import Switch from "../../Main/Switch/Switch";
import Tooltip from "../../Main/Tooltip/Tooltip";
import Button from "../../Main/Button/Button";
import { ArrowDownIcon, CloseIcon, ResizeIcon } from "../../Main/Icons";
import Popper from "../../Main/Popper/Popper";
import ExploreMetricLayouts from "../ExploreMetricLayouts/ExploreMetricLayouts";
import { ArrowDownIcon, CloseIcon } from "../../Main/Icons";

interface ExploreMetricItemControlsProps {
  name: string
  index: number
  isBucket: boolean
  rateEnabled: boolean
  showLegend: boolean
  size: string
  onChangeRate: (val: boolean) => void
  onChangeLegend: (val: boolean) => void
  onRemoveItem: (name: string) => void
  onChangeOrder: (name: string, oldIndex: number, newIndex: number) => void
  onChangeSize: (id: string) => void
}

const ExploreMetricItemHeader: FC<ExploreMetricItemControlsProps> = ({

@@ -26,17 +21,11 @@ const ExploreMetricItemHeader: FC<ExploreMetricItemControlsProps> = ({
  index,
  isBucket,
  rateEnabled,
  showLegend,
  size,
  onChangeRate,
  onChangeLegend,
  onRemoveItem,
  onChangeOrder,
  onChangeSize
}) => {

  const layoutButtonRef = useRef<HTMLDivElement>(null);
  const [openPopper, setOpenPopper] = useState(false);
  const handleClickRemove = () => {
    onRemoveItem(name);
  };

@@ -49,19 +38,6 @@ const ExploreMetricItemHeader: FC<ExploreMetricItemControlsProps> = ({
    onChangeOrder(name, index, index - 1);
  };

  const handleTogglePopper = () => {
    setOpenPopper(prev => !prev);
  };

  const handleClosePopper = () => {
    setOpenPopper(false);
  };

  const handleChangeSize = (id: string) => {
    onChangeSize(id);
    handleClosePopper();
  };

  return (
    <div className="vm-explore-metrics-item-header">
      <div className="vm-explore-metrics-item-header-order">

@@ -97,23 +73,7 @@ const ExploreMetricItemHeader: FC<ExploreMetricItemControlsProps> = ({
          />
        </Tooltip>
      )}
      <Switch
        label="show legend"
        value={showLegend}
        onChange={onChangeLegend}
      />
      <div className="vm-explore-metrics-item-header__layout">
        <Tooltip title="change size the graph">
          <div ref={layoutButtonRef}>
            <Button
              startIcon={<ResizeIcon/>}
              variant="text"
              color="gray"
              size="small"
              onClick={handleTogglePopper}
            />
          </div>
        </Tooltip>
        <Tooltip title="close graph">
          <Button
            startIcon={<CloseIcon/>}

@@ -124,18 +84,6 @@ const ExploreMetricItemHeader: FC<ExploreMetricItemControlsProps> = ({
          />
        </Tooltip>
      </div>

      <Popper
        open={openPopper}
        onClose={handleClosePopper}
        placement="bottom-right"
        buttonRef={layoutButtonRef}
      >
        <ExploreMetricLayouts
          value={size}
          onChange={handleChangeSize}
        />
      </Popper>
    </div>
  );
};
@@ -33,7 +33,6 @@

  &__layout {
    display: grid;
    grid-template-columns: auto auto;
    align-items: center;
  }
@@ -1,41 +0,0 @@
import React, { FC } from "preact/compat";
import "./style.scss";
import { sizeVariants } from "../ExploreMetricItem/ExploreMetricItem";
import classNames from "classnames";
import { DoneIcon } from "../../Main/Icons";

interface ExploreMetricLayoutsProps {
  value: string
  onChange: (id: string) => void
}

const ExploreMetricLayouts: FC<ExploreMetricLayoutsProps> = ({
  value,
  onChange
}) => {

  const createHandlerClick = (id: string) => () => {
    onChange(id);
  };

  return (
    <div className="vm-explore-metrics-layouts">
      {sizeVariants.map(variant => (
        <div
          className={classNames({
            "vm-list-item": true,
            "vm-list-item_multiselect": true,
            "vm-list-item_multiselect_selected": variant.id === value
          })}
          key={variant.id}
          onClick={createHandlerClick(variant.id)}
        >
          {variant.id === value && <DoneIcon/>}
          <span>{variant.id}</span>
        </div>
      ))}
    </div>
  );
};

export default ExploreMetricLayouts;
@@ -1,5 +0,0 @@
@use "src/styles/variables" as *;

.vm-explore-metrics-layouts {
  display: grid;
}
@@ -5,6 +5,7 @@ import "./style.scss";
import { useTimeState } from "../../../state/time/TimeStateContext";
import { useGraphDispatch, useGraphState } from "../../../state/graph/GraphStateContext";
import usePrevious from "../../../hooks/usePrevious";
import { GRAPH_SIZES } from "../../../constants/graph";

interface ExploreMetricsHeaderProps {
  jobs: string[]

@@ -12,22 +13,28 @@ interface ExploreMetricsHeaderProps {
  names: string[]
  job: string
  instance: string
  size: string
  selectedMetrics: string[]
  onChangeJob: (job: string) => void
  onChangeInstance: (instance: string) => void
  onToggleMetric: (name: string) => void
  onChangeSize: (sizeId: string) => void
}

const sizeOptions = GRAPH_SIZES.map(s => s.id);

const ExploreMetricsHeader: FC<ExploreMetricsHeaderProps> = ({
  jobs,
  instances,
  names,
  job,
  instance,
  size,
  selectedMetrics,
  onChangeJob,
  onChangeInstance,
  onToggleMetric
  onToggleMetric,
  onChangeSize
}) => {

  const { period: { step }, duration } = useTimeState();

@@ -60,7 +67,7 @@ const ExploreMetricsHeader: FC<ExploreMetricsHeaderProps> = ({
          label="Job"
          placeholder="Please select job"
          onChange={onChangeJob}
          autofocus
          autofocus={!job}
        />
      </div>
      <div className="vm-explore-metrics-header__instance">

@@ -81,6 +88,14 @@ const ExploreMetricsHeader: FC<ExploreMetricsHeaderProps> = ({
          value={customStep}
        />
      </div>
      <div className="vm-explore-metrics-header__size">
        <Select
          label="Size graphs"
          value={size}
          list={sizeOptions}
          onChange={onChangeSize}
        />
      </div>
      <div className="vm-explore-metrics-header-metrics">
        <Select
          value={selectedMetrics}
@@ -8,18 +8,13 @@
  gap: $padding-small calc($padding-small + 10px);

  &__job {
    flex-grow: 0.5;
    min-width: 200px;
    max-width: 300px;
    width: 100%;
  }

  &__instance {
    min-width: 200px;
    max-width: 500px;
    width: 100%;
  }

  &__step {
    flex-grow: 1;
    min-width: 300px;
  }

  &-metrics {
@@ -19,7 +19,7 @@ const Footer: FC = () => {
      <a
        className="vm__link"
        target="_blank"
        href="https://github.com/VictoriaMetrics/VictoriaMetrics/issues/new"
        href="https://github.com/VictoriaMetrics/VictoriaMetrics/issues/new/choose"
        rel="noreferrer"
      >
        create an issue
@@ -14,10 +14,12 @@ import { getCssVariable } from "../../../utils/theme";
import Tabs from "../../Main/Tabs/Tabs";
import "./style.scss";
import classNames from "classnames";
import { useDashboardsState } from "../../../state/dashboards/DashboardsStateContext";

const Header: FC = () => {
  const primaryColor = getCssVariable("color-primary");
  const appModeEnable = getAppModeEnable();
  const { dashboardsSettings } = useDashboardsState();

  const { headerStyles: {
    background = appModeEnable ? "#FFF" : primaryColor,

@@ -50,9 +52,9 @@ const Header: FC = () => {
    {
      label: routerOptions[router.dashboards].title,
      value: router.dashboards,
      hide: appModeEnable
      hide: appModeEnable || !dashboardsSettings.length
    }
  ]), [appModeEnable]);
  ]), [appModeEnable, dashboardsSettings]);

  const [activeMenu, setActiveMenu] = useState(pathname);
@@ -6,9 +6,11 @@ import { getAppModeEnable } from "../../utils/app-mode";
import classNames from "classnames";
import Footer from "./Footer/Footer";
import { routerOptions } from "../../router";
import { useFetchDashboards } from "../../pages/PredefinedPanels/hooks/useFetchDashboards";

const Layout: FC = () => {
  const appModeEnable = getAppModeEnable();
  useFetchDashboards();

  const { pathname } = useLocation();
  useEffect(() => {
@@ -32,7 +32,7 @@
    max-width: calc(100% - $padding-global);
    padding: 0 3px;
    font-size: $font-size-small;
    line-height: $font-size-small;
    line-height: calc($font-size-small + 2px);
    pointer-events: none;
    user-select: none;
    background-color: $color-background-block;

@@ -46,12 +46,12 @@
  }

  &__label {
    top: calc($font-size-small/-2);
    top: calc(($font-size-small/-2) - 2px);
    color: $color-text-secondary;
  }

  &__error {
    top: calc(100% - ($font-size-small/2));
    top: calc((100% - ($font-size-small/2)) - 2px);
    color: $color-error;
  }
@@ -1,6 +1,24 @@
import { GraphSize } from "../types";

export const MAX_QUERY_FIELDS = 4;
export const DEFAULT_MAX_SERIES = {
  table: 100,
  chart: 20,
  code: 1000,
};

export const GRAPH_SIZES: GraphSize[] = [
  {
    id: "small",
    height: () => window.innerHeight * 0.2
  },
  {
    id: "medium",
    isDefault: true,
    height: () => window.innerHeight * 0.4
  },
  {
    id: "large",
    height: () => window.innerHeight * 0.8
  },
];
@@ -8,6 +8,7 @@ import { TopQueriesStateProvider } from "../state/topQueries/TopQueriesStateContext";
import { SnackbarProvider } from "./Snackbar";

import { combineComponents } from "../utils/combine-components";
import { DashboardsStateProvider } from "../state/dashboards/DashboardsStateContext";

const providers = [
  AppStateProvider,

@@ -17,7 +18,8 @@ const providers = [
  GraphStateProvider,
  CardinalityStateProvider,
  TopQueriesStateProvider,
  SnackbarProvider
  SnackbarProvider,
  DashboardsStateProvider
];

export default combineComponents(...providers);
@@ -3,7 +3,14 @@ import { compactObject } from "../../../utils/object";
import { useTimeState } from "../../../state/time/TimeStateContext";
import { setQueryStringWithoutPageReload } from "../../../utils/query-string";

export const useSetQueryParams = () => {
interface queryProps {
  job: string
  instance?: string
  metrics: string
  size: string
}

export const useSetQueryParams = ({ job, instance, metrics, size }: queryProps) => {
  const { duration, relativeTime, period: { date, step } } = useTimeState();

  const setSearchParamsFromState = () => {

@@ -11,12 +18,16 @@ export const useSetQueryParams = () => {
      ["g0.range_input"]: duration,
      ["g0.end_input"]: date,
      ["g0.step_input"]: step,
      ["g0.relative_time"]: relativeTime
      ["g0.relative_time"]: relativeTime,
      size,
      job,
      instance,
      metrics
    });

    setQueryStringWithoutPageReload(params);
  };

  useEffect(setSearchParamsFromState, [duration, relativeTime, date, step]);
  useEffect(setSearchParamsFromState, [duration, relativeTime, date, step, job, instance, metrics, size]);
  useEffect(setSearchParamsFromState, []);
};
@ -8,13 +8,22 @@ import { useFetchNames } from "./hooks/useFetchNames";
|
|||
import "./style.scss";
|
||||
import ExploreMetricItem from "../../components/ExploreMetrics/ExploreMetricItem/ExploreMetricItem";
|
||||
import ExploreMetricsHeader from "../../components/ExploreMetrics/ExploreMetricsHeader/ExploreMetricsHeader";
|
||||
import { GRAPH_SIZES } from "../../constants/graph";
|
||||
import { getQueryStringValue } from "../../utils/query-string";
|
||||
|
||||
const defaultJob = getQueryStringValue("job", "") as string;
|
||||
const defaultInstance = getQueryStringValue("instance", "") as string;
|
||||
const defaultMetricsStr = getQueryStringValue("metrics", "") as string;
|
||||
const defaultSizeId = getQueryStringValue("size", "") as string;
|
||||
const defaultSize = GRAPH_SIZES.find(v => defaultSizeId ? v.id === defaultSizeId : v.isDefault) || GRAPH_SIZES[0];
|
||||
|
||||
const ExploreMetrics: FC = () => {
|
||||
useSetQueryParams();
|
||||
const [job, setJob] = useState(defaultJob);
|
||||
const [instance, setInstance] = useState(defaultInstance);
|
||||
const [metrics, setMetrics] = useState(defaultMetricsStr ? defaultMetricsStr.split("&") : []);
|
||||
const [size, setSize] = useState(defaultSize);
|
||||
|
||||
const [job, setJob] = useState("");
|
||||
const [instance, setInstance] = useState("");
|
||||
const [metrics, setMetrics] = useState<string[]>([]);
|
||||
useSetQueryParams({ job, instance, metrics: metrics.join("&"), size: size.id });
|
||||
|
||||
const { jobs, isLoading: loadingJobs, error: errorJobs } = useFetchJobs();
|
||||
const { instances, isLoading: loadingInstances, error: errorInstances } = useFetchInstances(job);
|
||||
|
@ -36,6 +45,11 @@ const ExploreMetrics: FC = () => {
|
|||
}
|
||||
};
|
||||
|
||||
const handleChangeSize = (sizeId: string) => {
|
||||
const target = GRAPH_SIZES.find(variant => variant.id === sizeId);
|
||||
if (target) setSize(target);
|
||||
};
|
||||
|
||||
const handleChangeOrder = (name: string, oldIndex: number, newIndex: number) => {
|
||||
const maxIndex = newIndex > (metrics.length - 1);
|
||||
const minIndex = newIndex < 0;
|
||||
|
@ -49,8 +63,10 @@ const ExploreMetrics: FC = () => {
|
|||
};
|
||||
|
||||
useEffect(() => {
|
||||
setInstance("");
|
||||
}, [job]);
|
||||
if (instance && instances.length && !instances.includes(instance)) {
|
||||
setInstance("");
|
||||
}
|
||||
}, [instances, instance]);
|
||||
|
||||
return (
|
||||
<div className="vm-explore-metrics">
|
||||
|
@ -59,9 +75,11 @@ const ExploreMetrics: FC = () => {
|
|||
instances={instances}
|
||||
names={names}
|
||||
job={job}
|
||||
size={size.id}
|
||||
instance={instance}
|
||||
selectedMetrics={metrics}
|
||||
onChangeJob={setJob}
|
||||
onChangeSize={handleChangeSize}
|
||||
onChangeInstance={setInstance}
|
||||
onToggleMetric={handleToggleMetric}
|
||||
/>
|
||||
|
@ -78,6 +96,7 @@ const ExploreMetrics: FC = () => {
|
|||
job={job}
|
||||
instance={instance}
|
||||
index={i}
|
||||
size={size}
|
||||
onRemoveItem={handleToggleMetric}
|
||||
onChangeOrder={handleChangeOrder}
|
||||
/>
|
||||
|
|
|
@ -1,12 +0,0 @@
|
|||
import { DashboardSettings } from "../../types";
|
||||
|
||||
const importModule = async (filename: string) => {
|
||||
const data = await fetch(`./dashboards/${filename}`);
|
||||
const json = await data.json();
|
||||
return json as DashboardSettings;
|
||||
};
|
||||
|
||||
export default async () => {
|
||||
const filenames = window.__VMUI_PREDEFINED_DASHBOARDS__;
|
||||
return await Promise.all(filenames.map(async f => importModule(f)));
|
||||
};
|
|
@ -0,0 +1,78 @@
|
|||
import { useEffect, useState } from "preact/compat";
|
||||
import { DashboardSettings, ErrorTypes } from "../../../types";
|
||||
import { useAppState } from "../../../state/common/StateContext";
|
||||
import { useDashboardsDispatch } from "../../../state/dashboards/DashboardsStateContext";
|
||||
import { getAppModeEnable } from "../../../utils/app-mode";
|
||||
|
||||
const importModule = async (filename: string) => {
|
||||
const data = await fetch(`./dashboards/${filename}`);
|
||||
const json = await data.json();
|
||||
return json as DashboardSettings;
|
||||
};
|
||||
|
||||
export const useFetchDashboards = (): {
|
||||
isLoading: boolean,
|
||||
error?: ErrorTypes | string,
|
||||
dashboardsSettings: DashboardSettings[],
|
||||
} => {
|
||||
|
||||
const appModeEnable = getAppModeEnable();
|
||||
const { serverUrl } = useAppState();
|
||||
const dispatch = useDashboardsDispatch();
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [error, setError] = useState<ErrorTypes | string>("");
|
||||
const [dashboardsSettings, setDashboards] = useState<DashboardSettings[]>([]);
|
||||
|
||||
const fetchLocalDashboards = async () => {
|
||||
const filenames = window.__VMUI_PREDEFINED_DASHBOARDS__;
|
||||
if (!filenames?.length) return [];
|
||||
return await Promise.all(filenames.map(async f => importModule(f)));
|
||||
};
|
||||
|
||||
const fetchRemoteDashboards = async () => {
|
||||
if (!serverUrl) return;
|
||||
setError("");
|
||||
setIsLoading(true);
|
||||
|
||||
try {
|
||||
const response = await fetch(`${serverUrl}/vmui/custom-dashboards`);
|
||||
const resp = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
const { dashboardsSettings } = resp;
|
||||
if (dashboardsSettings && dashboardsSettings.length > 0) {
|
||||
setDashboards((prevDash) => [...prevDash, ...dashboardsSettings]);
|
||||
}
|
||||
setIsLoading(false);
|
||||
} else {
|
||||
setError(resp.error);
|
||||
setIsLoading(false);
|
||||
}
|
||||
} catch (e) {
|
||||
setIsLoading(false);
|
||||
if (e instanceof Error) setError(`${e.name}: ${e.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (appModeEnable) return;
|
||||
setDashboards([]);
|
||||
fetchLocalDashboards().then(d => d.length && setDashboards((prevDash) => [...d, ...prevDash]));
|
||||
fetchRemoteDashboards();
|
||||
}, [serverUrl]);
|
||||
|
||||
useEffect(() => {
|
||||
dispatch({ type: "SET_DASHBOARDS_SETTINGS", payload: dashboardsSettings });
|
||||
}, [dashboardsSettings]);
|
||||
|
||||
useEffect(() => {
|
||||
dispatch({ type: "SET_DASHBOARDS_LOADING", payload: isLoading });
|
||||
}, [isLoading]);
|
||||
|
||||
useEffect(() => {
|
||||
dispatch({ type: "SET_DASHBOARDS_ERROR", payload: error });
|
||||
}, [error]);
|
||||
|
||||
return { dashboardsSettings, isLoading, error };
|
||||
};
|
||||
|
|
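For orientation, the dashboard files loaded by this hook (either the bundled ones listed in `window.__VMUI_PREDEFINED_DASHBOARDS__` or the ones served from `-vmui.customDashboardsPath` via `/vmui/custom-dashboards`) are plain JSON. The sketch below reconstructs their shape only from the fields this diff actually reads (`title`, `filename`, `rows`, `panels`); the `expr` field and the global declaration are assumptions, and the authoritative schema lives in `app/vmui/packages/vmui/public/dashboards`:

```ts
// Hypothetical shape, inferred from field usage in this diff.
interface PanelSketch {
  title?: string;
  expr?: string[]; // assumed: the queries rendered by the panel
}

interface DashboardSettingsSketch {
  title?: string;
  filename?: string;
  rows: Array<{ title?: string; panels: PanelSketch[] }>;
}

// Assumed: the list of bundled dashboard file names injected as a window global.
declare global {
  interface Window { __VMUI_PREDEFINED_DASHBOARDS__?: string[] }
}

// Example file content (metric name is made up).
const exampleDashboard: DashboardSettingsSketch = {
  title: "Example dashboard",
  rows: [
    { title: "Ingestion", panels: [{ title: "Rows inserted", expr: ["rate(vm_rows_inserted_total[5m])"] }] }
  ],
};

export { exampleDashboard };
```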
@ -1,60 +1,67 @@
import React, { FC, useEffect, useMemo, useState } from "preact/compat";
import getDashboardSettings from "./getDashboardSettings";
import { DashboardSettings } from "../../types";
import React, { FC, useMemo, useState } from "preact/compat";
import PredefinedDashboard from "./PredefinedDashboard/PredefinedDashboard";
import { useSetQueryParams } from "./hooks/useSetQueryParams";
import Tabs from "../../components/Main/Tabs/Tabs";
import Alert from "../../components/Main/Alert/Alert";
import classNames from "classnames";
import "./style.scss";
import { useDashboardsState } from "../../state/dashboards/DashboardsStateContext";
import Spinner from "../../components/Main/Spinner/Spinner";

const Index: FC = () => {
const DashboardsLayout: FC = () => {
useSetQueryParams();
const { dashboardsSettings, dashboardsLoading, dashboardsError } = useDashboardsState();
const [dashboard, setDashboard] = useState(0);

const [dashboards, setDashboards] = useState<DashboardSettings[]>([]);
const [tab, setTab] = useState("0");

const tabs = useMemo(() => dashboards.map((d, i) => ({
const dashboards = useMemo(() => dashboardsSettings.map((d, i) => ({
label: d.title || "",
value: `${i}`,
className: "vm-predefined-panels-tabs__tab"
})), [dashboards]);
value: i,
})), [dashboardsSettings]);

const activeDashboard = useMemo(() => dashboards[+tab] || {}, [dashboards, tab]);
const activeDashboard = useMemo(() => dashboardsSettings[dashboard] || {}, [dashboardsSettings, dashboard]);
const rows = useMemo(() => activeDashboard?.rows, [activeDashboard]);
const filename = useMemo(() => activeDashboard.title || activeDashboard.filename || "", [activeDashboard]);
const validDashboardRows = useMemo(() => Array.isArray(rows) && !!rows.length, [rows]);

const handleChangeTab = (value: string) => {
setTab(value);
const handleChangeDashboard = (value: number) => {
setDashboard(value);
};

useEffect(() => {
getDashboardSettings().then(d => d.length && setDashboards(d));
}, []);
const createHandlerSelectDashboard = (value: number) => () => {
handleChangeDashboard(value);
};

return <div className="vm-predefined-panels">
{!dashboards.length && <Alert variant="info">Dashboards not found</Alert>}
{tabs.length > 1 && (
<div className="vm-predefined-panels-tabs vm-block vm-block_empty-padding">
<Tabs
activeItem={tab}
items={tabs}
onChange={handleChangeTab}
/>
{dashboardsLoading && <Spinner />}
{dashboardsError && <Alert variant="error">{dashboardsError}</Alert>}
{!dashboardsSettings.length && <Alert variant="info">Dashboards not found</Alert>}
{dashboards.length > 1 && (
<div className="vm-predefined-panels-tabs vm-block">
{dashboards.map(tab => (
<div
key={tab.value}
className={classNames({
"vm-predefined-panels-tabs__tab": true,
"vm-predefined-panels-tabs__tab_active": tab.value == dashboard
})}
onClick={createHandlerSelectDashboard(tab.value)}
>
{tab.label}
</div>
))}
</div>
)}
<div className="vm-predefined-panels__dashboards">
{validDashboardRows && (
rows.map((r,i) =>
<PredefinedDashboard
key={`${tab}_${i}`}
key={`${dashboard}_${i}`}
index={i}
filename={filename}
title={r.title}
panels={r.panels}
/>)
)}
{!!dashboards.length && !validDashboardRows && (
{!!dashboardsSettings.length && !validDashboardRows && (
<Alert variant="error">
<code>"rows"</code> not found. Check the configuration file <b>{filename}</b>.
</Alert>

@ -63,4 +70,4 @@ const Index: FC = () => {
</div>;
};

export default Index;
export default DashboardsLayout;

@ -5,22 +5,37 @@
gap: $padding-global;
align-items: flex-start;

&-tabs.vm-block {
padding: $padding-global;
}

&-tabs {
display: flex;
flex-wrap: wrap;
align-items: center;
justify-content: flex-start;
font-size: $font-size-small;
gap: $padding-small;
white-space: nowrap;
overflow: hidden;

&__tab {
padding: $padding-global;
padding: $padding-small $padding-global;
border-radius: $border-radius-medium;
cursor: pointer;
transition: opacity 200ms ease-in-out, color 150ms ease-in;
border-right: $border-divider;
transition: background 200ms ease-in-out, color 150ms ease-in;
background: $color-white;
text-transform: uppercase;
color: rgba($color-black, 0.2);
border: 1px solid rgba($color-black, 0.2);

&:hover {
opacity: 1;
color: $color-primary;
}

&_active {
border-color: $color-primary;
color: $color-primary;
}
}
}

@ -0,0 +1,24 @@
import React, { createContext, FC, useContext, useMemo, useReducer } from "preact/compat";
import { DashboardsAction, DashboardsState, initialDashboardsState, reducer } from "./reducer";

import { Dispatch } from "react";

type DashboardsStateContextType = { state: DashboardsState, dispatch: Dispatch<DashboardsAction> };

export const DashboardsStateContext = createContext<DashboardsStateContextType>({} as DashboardsStateContextType);

export const useDashboardsState = (): DashboardsState => useContext(DashboardsStateContext).state;
export const useDashboardsDispatch = (): Dispatch<DashboardsAction> => useContext(DashboardsStateContext).dispatch;
export const DashboardsStateProvider: FC = ({ children }) => {
const [state, dispatch] = useReducer(reducer, initialDashboardsState);

const contextValue = useMemo(() => {
return { state, dispatch };
}, [state, dispatch]);

return <DashboardsStateContext.Provider value={contextValue}>
{children}
</DashboardsStateContext.Provider>;
};

41
app/vmui/packages/vmui/src/state/dashboards/reducer.ts
Normal file

@ -0,0 +1,41 @@
import { DashboardSettings } from "../../types";

export interface DashboardsState {
dashboardsSettings: DashboardSettings[];
dashboardsLoading: boolean,
dashboardsError: string
}

export type DashboardsAction =
| { type: "SET_DASHBOARDS_SETTINGS", payload: DashboardSettings[] }
| { type: "SET_DASHBOARDS_LOADING", payload: boolean }
| { type: "SET_DASHBOARDS_ERROR", payload: string }

export const initialDashboardsState: DashboardsState = {
dashboardsSettings: [],
dashboardsLoading: false,
dashboardsError: "",
};

export function reducer(state: DashboardsState, action: DashboardsAction): DashboardsState {
switch (action.type) {
case "SET_DASHBOARDS_SETTINGS":
return {
...state,
dashboardsSettings: action.payload
};
case "SET_DASHBOARDS_LOADING":
return {
...state,
dashboardsLoading: action.payload
};
case "SET_DASHBOARDS_ERROR":
return {
...state,
dashboardsError: action.payload
};
default:
throw new Error();
}
}
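Putting the new state module together: once `DashboardsStateProvider` is registered in the providers list and `useFetchDashboards()` runs in `Layout`, any component can read the shared dashboards state through `useDashboardsState`. A hypothetical consumer, sketched against the hooks added above (component name and import path are illustrative):

```tsx
import React, { FC } from "preact/compat";
import { useDashboardsState } from "../state/dashboards/DashboardsStateContext";

// Hypothetical component: renders a one-line summary of the dashboards state.
const DashboardsSummary: FC = () => {
  const { dashboardsSettings, dashboardsLoading, dashboardsError } = useDashboardsState();

  if (dashboardsLoading) return <span>loading dashboards…</span>;
  if (dashboardsError) return <span>error: {dashboardsError}</span>;
  return <span>{dashboardsSettings.length} dashboard(s) loaded</span>;
};

export default DashboardsSummary;
```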
@ -112,3 +112,9 @@ export interface Timezone {
utc: string,
search?: string
}

export interface GraphSize {
id: string,
isDefault?: boolean,
height: () => number
}

@ -25,14 +25,26 @@ export const defaultOptions = {
};

export const formatTicks = (u: uPlot, ticks: number[], unit = ""): string[] => {
return ticks.map(v => `${formatPrettyNumber(v)} ${unit}`);
const min = ticks[0];
const max = ticks[ticks.length-1];
if (!unit) {
return ticks.map(v => formatPrettyNumber(v, min, max));
}
return ticks.map(v => `${formatPrettyNumber(v, min, max)} ${unit}`);
};

export const formatPrettyNumber = (n: number | null | undefined): string => {
export const formatPrettyNumber = (n: number | null | undefined, min = 0, max = 0): string => {
if (n === undefined || n === null) {
return "";
}
return n.toLocaleString("en-US", { maximumSignificantDigits: 20 });
let digits = 3 + Math.floor(1 + Math.log10(Math.max(Math.abs(min), Math.abs(max))) - Math.log10(Math.abs(min - max)));
if (isNaN(digits) || digits > 20) {
digits = 20;
}
return n.toLocaleString("en-US", {
minimumSignificantDigits: digits,
maximumSignificantDigits: digits,
});
};

interface AxisExtend extends Axis {
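To make the new significant-digits heuristic concrete, here is a small self-contained sketch that mirrors the formula from `formatPrettyNumber` above and evaluates it for two made-up tick ranges: a wide range keeps few digits, while a narrow range around a large value gets more.

```ts
// Mirrors the digits heuristic from formatPrettyNumber in this diff; inputs are illustrative.
const prettyDigits = (min: number, max: number): number => {
  let digits = 3 + Math.floor(1 + Math.log10(Math.max(Math.abs(min), Math.abs(max))) - Math.log10(Math.abs(min - max)));
  if (isNaN(digits) || digits > 20) {
    digits = 20; // also covers min === max, where log10(0) yields -Infinity/NaN
  }
  return digits;
};

console.log(prettyDigits(0, 100));      // 4 -> a tick of 40 renders as "40.00"
console.log(prettyDigits(99.5, 100.5)); // 6 -> a tick of 100 renders as "100.000"
```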
@ -85,7 +85,7 @@
}
]
},
"description": "Overview for cluster VictoriaMetrics v1.83.0 or higher",
"description": "Overview for cluster VictoriaMetrics v1.86.0 or higher",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 1,

@ -7098,7 +7098,7 @@
"type": "prometheus",
"uid": "$ds"
},
"description": "Shows the max number of ongoing insertions.\n* `max` - equal to number of CPU * 4 by default. May be configured with `maxConcurrentInserts` flag;\n* `current` - current number of goroutines busy with processing requests.\n\n`-maxConcurrentInserts` limits the number of insert requests which may be actively processed at any given point in time. All the other insert requests are queued for up to `-insert.maxQueueDuration` in the hope they will get a chance to be processed. This queue is used mostly for absorbing spikes for incoming insert request rate.\n\nWhen `current` hits `max` constantly, it means vminsert node is overloaded and requires more CPU or higher limits.",
"description": "Shows the max number of ongoing insertions.\n* `max` - equal to number of CPU * 2 by default. May be configured with `maxConcurrentInserts` flag;\n* `current` - current number of goroutines busy with processing requests.\n\n`-maxConcurrentInserts` limits the number of insert requests which may be actively processed at any given point in time. All the other insert requests are queued for up to `-insert.maxQueueDuration` in the hope they will get a chance to be processed. This queue is used mostly for absorbing spikes for incoming insert request rate.\n\nWhen `current` hits `max` constantly, it means vminsert node is overloaded and requires more CPU or higher limits.",
"fieldConfig": {
"defaults": {
"color": {
@ -15,6 +15,10 @@ The following tip changes can be tested by building VictoriaMetrics components f

## tip

* FEATURE: [vmui](https://docs.victoriametrics.com/#vmui): add ability to show custom dashboards at vmui by specifying a path to a directory with dashboard config files via `-vmui.customDashboardsPath` command-line flag. See [this feature request](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3322) and [these docs](https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards).
* BUGFIX: [vmagent](https://docs.victoriametrics.com/vmagent.html): [dockerswarm_sd_configs](https://docs.victoriametrics.com/sd_configs.html#dockerswarm_sd_configs): apply `filters` only to objects of the specified `role`. Previously filters were applied to all the objects, which could cause errors when different types of objects were used with filters that were not compatible with them. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3579).

## [v1.86.1](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.86.1)
@ -948,6 +948,8 @@ Below is the output for `/path/to/vminsert -help`:
Show VictoriaMetrics version
-vmstorageDialTimeout duration
Timeout for establishing RPC connections from vminsert to vmstorage (default 5s)
-vmui.customDashboardsPath string
Optional path to vmui dashboards. See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards
```

### List of command-line flags for vmselect

@ -2504,4 +2504,6 @@ Pass `-help` to VictoriaMetrics in order to see the list of supported command-li
Show VictoriaMetrics version
-vmalert.proxyURL string
Optional URL for proxying requests to vmalert. For example, if -vmalert.proxyURL=http://vmalert:8880 , then alerting API requests such as /api/v1/rules from Grafana will be proxied to http://vmalert:8880/api/v1/rules
-vmui.customDashboardsPath string
Optional path to vmui dashboards. See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards
```

@ -2507,4 +2507,6 @@ Pass `-help` to VictoriaMetrics in order to see the list of supported command-li
Show VictoriaMetrics version
-vmalert.proxyURL string
Optional URL for proxying requests to vmalert. For example, if -vmalert.proxyURL=http://vmalert:8880 , then alerting API requests such as /api/v1/rules from Grafana will be proxied to http://vmalert:8880/api/v1/rules
-vmui.customDashboardsPath string
Optional path to vmui dashboards. See https://github.com/VictoriaMetrics/VictoriaMetrics/tree/master/app/vmui/packages/vmui/public/dashboards
```
@ -1,5 +1,14 @@
---
sort: 15
weight: 15
title: FAQ
menu:
docs:
parent: "operator"
weight: 15
identifier: "faq-operator"
aliases:
- /operator/FAQ.html
---

# FAQ

@ -1,5 +1,13 @@
---
sort: 1
weight: 1
title: VictoriaMetrics Operator Overview
menu:
docs:
parent: "operator"
weight: 1
aliases:
- /operator/VictoriaMetrics-Operator.html
---

# VictoriaMetrics operator

@ -1,5 +1,13 @@
---
sort: 2
weight: 2
title: Additional Scrape Configuration
menu:
docs:
parent: "operator"
weight: 2
aliases:
- operator/additional-scrape.html
---

# Additional Scrape Configuration

@ -1,3 +1,14 @@
---
sort: 16
title: API Docs
weight: 16
menu:
docs:
parent: "operator"
weight: 16
aliases:
- /operator/api.html
---

# API Docs

@ -1,5 +1,13 @@
---
sort: 4
weight: 4
title: Authorization and exposing components
menu:
docs:
parent: "operator"
weight: 4
aliases:
- /operator/auth.html
---

# Authorization and exposing components

@ -1,5 +1,13 @@
---
sort: 5
weight: 5
title: Backups
menu:
docs:
parent: "operator"
weight: 5
aliases:
- /operator/backups.html
---

# Backups

@ -1,5 +1,13 @@
---
sort: 14
weight: 14
title: Configuration synchronization
menu:
docs:
parent: "operator"
weight: 14
aliases:
- /operator/configuration_syncronization.html
---

# Configuration synchronization

@ -1,5 +1,13 @@
---
sort: 6
weight: 6
title: Design
menu:
docs:
parent: "operator"
weight: 6
aliases:
- /operator/design.html
---

# Design

@ -1,5 +1,13 @@
---
sort: 7
weight: 7
title: High Availability
menu:
docs:
parent: "operator"
weight: 7
aliases:
- /operator/high-availability.html
---

# High Availability

@ -1,5 +1,13 @@
---
sort: 8
weight: 8
title: Managing application versions
menu:
docs:
parent: "operator"
weight: 8
aliases:
- /operator/managing-versions.html
---

@ -1,5 +1,14 @@
---
sort: 9
weight: 9
title: Quick start
menu:
docs:
parent: "operator"
weight: 9
identifier: "quickstartoperator"
aliases:
- /operator/quick-start.html
---

# Quick start

@ -1,5 +1,13 @@
---
sort: 10
weight: 10
title: Relabeling
menu:
docs:
parent: "operator"
weight: 10
aliases:
- /operator/relabeling.html
---

# Relabeling

@ -1,5 +1,13 @@
---
sort: 11
weight: 11
title: CRD Validation
menu:
docs:
parent: "operator"
weight: 11
aliases:
- /operator/resources-validation.html
---

# CRD Validation

@ -1,5 +1,13 @@
---
sort: 12
weight: 12
title: Security
menu:
docs:
parent: "operator"
weight: 12
aliases:
- /operator/security.html
---

# Security

@ -1,3 +1,14 @@
---
sort: 14
weight: 14
title: Variables
menu:
docs:
parent: "operator"
weight: 14
aliases:
- /operator/vars.html
---
# Auto Generated vars for package config
updated at Fri Jan 21 15:57:41 UTC 2022

@ -1,5 +1,13 @@
---
sort: 13
weight: 13
title: Managing configuration for VMAlertmanager
menu:
docs:
parent: "operator"
weight: 13
aliases:
- /operator/vmalertmanager-configuration.html
---

# Managing configuration for VMAlertmanager
@ -16,6 +16,11 @@ type apiConfig struct {
client *discoveryutils.Client
port int

// role is the type of objects to discover.
//
// filtersQueryArg is applied only to the given role - the rest of objects are queried without filters.
role string

// filtersQueryArg contains escaped `filters` query arg to add to each request to Docker Swarm API.
filtersQueryArg string
}

@ -49,11 +54,12 @@ func newAPIConfig(sdc *SDConfig, baseDir string) (*apiConfig, error) {
return nil, fmt.Errorf("cannot create HTTP client for %q: %w", sdc.Host, err)
}
cfg.client = client
cfg.role = sdc.Role
return cfg, nil
}

func (cfg *apiConfig) getAPIResponse(path string) ([]byte, error) {
if len(cfg.filtersQueryArg) > 0 {
func (cfg *apiConfig) getAPIResponse(path, filtersQueryArg string) ([]byte, error) {
if len(filtersQueryArg) > 0 {
separator := "?"
if strings.Contains(path, "?") {
separator = "&"

@ -28,7 +28,7 @@ func getNetworksLabelsByNetworkID(cfg *apiConfig) (map[string]*promutils.Labels,
}

func getNetworks(cfg *apiConfig) ([]network, error) {
resp, err := cfg.getAPIResponse("/networks")
resp, err := cfg.getAPIResponse("/networks", "")
if err != nil {
return nil, fmt.Errorf("cannot query dockerswarm api for networks: %w", err)
}

@ -47,7 +47,11 @@ func getNodesLabels(cfg *apiConfig) ([]*promutils.Labels, error) {
}

func getNodes(cfg *apiConfig) ([]node, error) {
resp, err := cfg.getAPIResponse("/nodes")
filtersQueryArg := ""
if cfg.role == "nodes" {
filtersQueryArg = cfg.filtersQueryArg
}
resp, err := cfg.getAPIResponse("/nodes", filtersQueryArg)
if err != nil {
return nil, fmt.Errorf("cannot query dockerswarm api for nodes: %w", err)
}

@ -59,7 +59,11 @@ func getServicesLabels(cfg *apiConfig) ([]*promutils.Labels, error) {
}

func getServices(cfg *apiConfig) ([]service, error) {
data, err := cfg.getAPIResponse("/services")
filtersQueryArg := ""
if cfg.role == "services" {
filtersQueryArg = cfg.filtersQueryArg
}
data, err := cfg.getAPIResponse("/services", filtersQueryArg)
if err != nil {
return nil, fmt.Errorf("cannot query dockerswarm api for services: %w", err)
}

@ -62,7 +62,11 @@ func getTasksLabels(cfg *apiConfig) ([]*promutils.Labels, error) {
}

func getTasks(cfg *apiConfig) ([]task, error) {
resp, err := cfg.getAPIResponse("/tasks")
filtersQueryArg := ""
if cfg.role == "tasks" {
filtersQueryArg = cfg.filtersQueryArg
}
resp, err := cfg.getAPIResponse("/tasks", filtersQueryArg)
if err != nil {
return nil, fmt.Errorf("cannot query dockerswarm api for tasks: %w", err)
}
@ -217,11 +217,14 @@ type scrapeWork struct {
// equals to or exceeds -promscrape.minResponseSizeForStreamParse
lastScrapeCompressed []byte

// lastErrLogTimestamp is the timestamp in unix seconds of the last logged scrape error
lastErrLogTimestamp uint64
// nextErrorLogTime is the timestamp in millisecond when the next scrape error should be logged.
nextErrorLogTime int64

// errsSuppressedCount is the number of suppressed scrape errors since lastErrLogTimestamp
errsSuppressedCount int
// failureRequestsCount is the number of suppressed scrape errors during the last suppressScrapeErrorsDelay
failureRequestsCount int

// successRequestsCount is the number of success requests during the last suppressScrapeErrorsDelay
successRequestsCount int
}

func (sw *scrapeWork) loadLastScrape() string {

@ -355,21 +358,26 @@ func (sw *scrapeWork) logError(s string) {

func (sw *scrapeWork) scrapeAndLogError(scrapeTimestamp, realTimestamp int64) {
err := sw.scrapeInternal(scrapeTimestamp, realTimestamp)
if *suppressScrapeErrors {
return
}
if err == nil {
sw.successRequestsCount++
return
}
d := time.Duration(fasttime.UnixTimestamp()-sw.lastErrLogTimestamp) * time.Second
if *suppressScrapeErrors || d < *suppressScrapeErrorsDelay {
sw.errsSuppressedCount++
sw.failureRequestsCount++
if sw.nextErrorLogTime == 0 {
sw.nextErrorLogTime = realTimestamp + suppressScrapeErrorsDelay.Milliseconds()
}
if realTimestamp < sw.nextErrorLogTime {
return
}
err = fmt.Errorf("cannot scrape %q (job %q, labels %s): %w", sw.Config.ScrapeURL, sw.Config.Job(), sw.Config.Labels.String(), err)
if sw.errsSuppressedCount > 0 {
err = fmt.Errorf("%w; %d similar errors suppressed during the last %.1f seconds", err, sw.errsSuppressedCount, d.Seconds())
}
logger.Warnf("%s", err)
sw.lastErrLogTimestamp = fasttime.UnixTimestamp()
sw.errsSuppressedCount = 0
totalRequests := sw.failureRequestsCount + sw.successRequestsCount
logger.Warnf("cannot scrape target %q (%s) %d out of %d times during -promscrape.suppressScrapeErrorsDelay=%s; the last error: %s",
sw.Config.ScrapeURL, sw.Config.Labels.String(), sw.failureRequestsCount, totalRequests, *suppressScrapeErrorsDelay, err)
sw.nextErrorLogTime = realTimestamp + suppressScrapeErrorsDelay.Milliseconds()
sw.failureRequestsCount = 0
sw.successRequestsCount = 0
}

var (